<?php
use Cognesy\Instructor\StructuredOutput;
use Cognesy\Instructor\StructuredOutputRuntime;
use Cognesy\Polyglot\Inference\Config\LLMConfig;
use Cognesy\Polyglot\Inference\LLMProvider;
// Assemble an LLM configuration targeting OpenAI's Chat Completions endpoint.
// NOTE(review): assumes $yourApiKey is defined by the surrounding context.
$llmConfig = new LLMConfig(
    apiUrl: 'https://api.openai.com/v1',
    apiKey: $yourApiKey,
    endpoint: '/chat/completions',
    metadata: ['organization' => ''],
    model: 'gpt-4o-mini',
    maxTokens: 128,
    // configure HTTP via HttpClientBuilder or facade-level methods
    driver: 'openai',
);

// Build a provider carrying the custom config, then a runtime that
// overrides Instructor's defaults with it.
$provider = LLMProvider::new()->withLLMConfig($llmConfig);
$runtime = StructuredOutputRuntime::fromProvider(provider: $provider);

// Run structured extraction: turn the user text into a Person instance.
// NOTE(review): assumes $text and the Person class are defined elsewhere.
$person = (new StructuredOutput($runtime))->with(
    messages: [['role' => 'user', 'content' => $text]],
    responseModel: Person::class,
    options: ['temperature' => 0.0],
)->get();
// @doctest id="d2aa"