You can connect to an Azure OpenAI instance using a dedicated client provided
by Instructor. Note that this requires setting up your own model deployment
in the Azure OpenAI service console.
```php
<?php
use Cognesy\Polyglot\LLM\Inference;
use Cognesy\Utils\Str;

require 'examples/boot.php';

$answer = (new Inference)
    ->withConnection('azure') // connection preset defined in /config/llm.php
    ->create(
        messages: [['role' => 'user', 'content' => 'What is the capital of France']],
        options: ['max_tokens' => 64]
    )
    ->toText();

echo "USER: What is the capital of France\n";
echo "ASSISTANT: $answer\n";

assert(Str::contains($answer, 'Paris'));
?>
```
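
The connection preset referenced above lives in `/config/llm.php`. As a rough illustration only, an Azure entry in that file might look like the sketch below; the field names (`apiUrl`, `apiKey`, `model`, `apiVersion`) and overall structure are assumptions, so check the config file shipped with your Instructor version and your own deployment details.

```php
<?php
// Hypothetical sketch of an 'azure' connection entry in /config/llm.php.
// Field names and structure are assumptions for illustration only —
// verify them against the config file bundled with your Instructor version.
return [
    'connections' => [
        'azure' => [
            // Endpoint of the Azure OpenAI resource you created in the service console (assumption)
            'apiUrl' => 'https://YOUR_RESOURCE.openai.azure.com',
            // API key read from the environment (assumption)
            'apiKey' => getenv('AZURE_OPENAI_API_KEY') ?: '',
            // Name of the model deployment you set up in the Azure console (assumption)
            'model' => 'YOUR_DEPLOYMENT_NAME',
            // API version required by the Azure OpenAI endpoint (assumption)
            'apiVersion' => '2024-06-01',
        ],
    ],
];
```

Once the preset is in place, `withConnection('azure')` picks it up by name, so switching between providers is a one-line change in your code.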