Skip to main content

Overview

This example demonstrates an edge adapter that reads raw config arrays from a custom source and maps them into typed config objects used by InferenceRuntime.

Example

<?php
require 'examples/boot.php';

use Adbar\Dot;
use Cognesy\Config\Env;
use Cognesy\Events\Dispatchers\EventDispatcher;
use Cognesy\Events\Event;
use Cognesy\Http\Config\HttpClientConfig;
use Cognesy\Http\Creation\HttpClientBuilder;
use Cognesy\Polyglot\Inference\Config\LLMConfig;
use Cognesy\Polyglot\Inference\Inference;
use Cognesy\Polyglot\Inference\InferenceRuntime;
use Cognesy\Polyglot\Inference\LLMProvider;
use Cognesy\Utils\Str;
use Symfony\Component\HttpClient\HttpClient as SymfonyHttpClient;

final class CustomConfigSource
{
    // Dot-notation accessor over the raw config array supplied at construction.
    private Dot $dot;

    public function __construct(array $data = [])
    {
        $this->dot = new Dot($data);
    }

    /**
     * Returns the raw config array for a named LLM connection.
     *
     * @throws RuntimeException when no array is stored under "llm.connections.$name"
     */
    public function llmConnection(string $name): array
    {
        $entry = $this->dot->get("llm.connections.$name");
        return is_array($entry)
            ? $entry
            : throw new RuntimeException("Unknown LLM connection: $name");
    }
}

// In-memory config fixture: two LLM connection entries keyed by connection name.
// Each entry's keys (driver/apiUrl/apiKey/endpoint/model/maxTokens) are the raw
// shape consumed later by LLMConfig::fromArray() — presumably a 1:1 field mapping;
// verify against the LLMConfig class if fields are added here.
$configSource = new CustomConfigSource([
    'llm' => [
        'connections' => [
            'deepseek' => [
                'driver' => 'deepseek',
                'apiUrl' => 'https://api.deepseek.com',
                // Empty string when the env var is unset; connection selection below skips it then.
                'apiKey' => (string) Env::get('DEEPSEEK_API_KEY', ''),
                'endpoint' => '/chat/completions',
                'model' => 'deepseek-chat',
                'maxTokens' => 128,
            ],
            'openai' => [
                'driver' => 'openai',
                'apiUrl' => 'https://api.openai.com/v1',
                'apiKey' => (string) Env::get('OPENAI_API_KEY', ''),
                'endpoint' => '/chat/completions',
                'model' => 'gpt-4.1-nano',
                'maxTokens' => 256,
            ],
        ],
    ],
]);

// Pick the first connection whose API key is present in the environment.
// Checks run in order, lazily: the OpenAI key is only consulted when the
// DeepSeek key is absent (same short-circuit behavior as a match(true) chain).
$connection = null;
foreach (['DEEPSEEK_API_KEY' => 'deepseek', 'OPENAI_API_KEY' => 'openai'] as $envVar => $candidate) {
    if ((string) Env::get($envVar, '') !== '') {
        $connection = $candidate;
        break;
    }
}
if ($connection === null) {
    throw new RuntimeException('Set DEEPSEEK_API_KEY or OPENAI_API_KEY in your environment to run this example.');
}

// Shared event dispatcher — wired into both the HTTP layer here and the
// inference runtime below, so one listener sees events from both.
$events = new EventDispatcher();

// Preconfigured Symfony client (HTTP/2), registered under the 'symfony'
// driver name that the HttpClientConfig above selects.
$symfonyClient = SymfonyHttpClient::create(['http_version' => '2.0']);

$httpClient = (new HttpClientBuilder(events: $events))
    ->withConfig(new HttpClientConfig(driver: 'symfony'))
    ->withClientInstance(driverName: 'symfony', clientInstance: $symfonyClient)
    ->create();

// Map the raw connection array into a typed config, then assemble the runtime
// from the typed pieces built above (provider, events, custom HTTP client).
$llmConfig = LLMConfig::fromArray($configSource->llmConnection($connection));
$provider = LLMProvider::fromLLMConfig($llmConfig);
$runtime = InferenceRuntime::fromProvider(
    provider: $provider,
    events: $events,
    httpClient: $httpClient,
);

// Print every event dispatched from this point on. NOTE(review): setup events
// emitted above were dispatched before this listener existed and are not shown.
$events->addListener(Event::class, fn(Event $e) => $e->print());

// Single source of truth for the prompt, so the echoed transcript below always
// matches what was actually sent to the model (previously the echo said
// "What is capital of France" while the request said "What is the capital of France").
$question = 'What is the capital of France';

$answer = Inference::fromRuntime($runtime)
    ->withMessages([['role' => 'user', 'content' => $question]])
    ->withMaxTokens(256)
    ->withStreaming()
    ->get();

echo "USER: $question\n";
echo "ASSISTANT: $answer\n";

// Sanity check — may be disabled in production via zend.assertions.
assert(Str::contains($answer, 'Paris'));
?>