<?php
require 'examples/boot.php';
use Cognesy\Instructor\StructuredOutput;
use Cognesy\Instructor\StructuredOutputRuntime;
use Cognesy\Polyglot\Inference\LLMProvider;
/**
 * Organizational role for an extracted contact.
 *
 * String-backed with lowercase values so the model's structured output
 * ('ceo', 'cto', ...) maps directly onto a case via the backing value.
 */
enum Role: string
{
    case CEO = 'ceo';
    case CTO = 'cto';
    case Developer = 'developer';
    case Other = 'other';
}
// Response model passed as `responseModel` below: the library reflects on
// these typed public properties to build the schema the LLM must fill.
// NOTE(review): property docblocks are kept minimal on purpose — they may
// be forwarded to the model as field descriptions; verify before expanding.
class UserDetail
{
    /** Person's name (e.g. "Jason" in the sample message) */
    public string $name;
    /** Role inferred from context ("our CTO" -> Role::CTO) */
    public Role $role;
    /** Age in years */
    public int $age;
}
// Build a runtime bound to the OpenAI provider; wiretap() prints every
// internal event, which is useful here because streaming is enabled below.
$runtime = StructuredOutputRuntime::fromProvider(LLMProvider::using('openai'))
    ->wiretap(fn($event) => $event->print());

// Ask the model to extract a UserDetail instance from a free-form message.
// With 'stream' => true, partial updates surface through the wiretap events.
$user = (new StructuredOutput($runtime))
    ->with(
        messages: [["role" => "user", "content" => "Contact our CTO, Jason is 28 years old -- Best regards, Tom"]],
        responseModel: UserDetail::class,
        options: ['stream' => true],
    )
    ->get();

dump($user);

// Sanity checks on the extraction. Note: assert() is a no-op when
// zend.assertions is disabled (typical in production), so these only
// guard the example when run in a dev configuration.
assert($user->name === "Jason");
assert($user->role === Role::CTO);
assert($user->age === 28);
// Closing "?>" intentionally omitted (PSR-12): a trailing close tag in a
// PHP-only file can emit accidental whitespace after it.