diff --git a/docs/cookbook/examples/extras/llm.mdx b/docs/cookbook/examples/extras/llm.mdx index 2286ffb1..86af3b9f 100644 --- a/docs/cookbook/examples/extras/llm.mdx +++ b/docs/cookbook/examples/extras/llm.mdx @@ -23,7 +23,9 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/'); use Cognesy\Instructor\Extras\LLM\Inference; use Cognesy\Instructor\Utils\Str; -// simplified API, default connection for convenient ad-hoc calls + + +// EXAMPLE 1: simplified API, default connection for convenient ad-hoc calls $answer = Inference::text('What is capital of Germany'); echo "USER: What is capital of Germany\n"; @@ -31,7 +33,9 @@ echo "ASSISTANT: $answer\n"; assert(Str::contains($answer, 'Berlin')); -// regular API, allows to customize inference options + + +// EXAMPLE 2: regular API, allows to customize inference options $answer = (new Inference) ->withConnection('openai') // optional, default is set in /config/llm.php ->create( @@ -44,7 +48,10 @@ echo "USER: What is capital of France\n"; echo "ASSISTANT: $answer\n"; assert(Str::contains($answer, 'Paris')); -// regular API, allows to customize inference options + + + +// EXAMPLE 3: streaming response $stream = (new Inference) ->create( messages: [['role' => 'user', 'content' => 'Describe capital of Brasil']], diff --git a/docs/cookbook/examples/extras/llm_json.mdx b/docs/cookbook/examples/extras/llm_json.mdx index 01cb2f7b..cfda05cd 100644 --- a/docs/cookbook/examples/extras/llm_json.mdx +++ b/docs/cookbook/examples/extras/llm_json.mdx @@ -35,7 +35,6 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . 
'../../src/'); use Cognesy\Instructor\Enums\Mode; use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') // optional, default is set in /config/llm.php ->create( diff --git a/docs/cookbook/examples/extras/llm_json_schema.mdx b/docs/cookbook/examples/extras/llm_json_schema.mdx index 28281db3..af47c397 100644 --- a/docs/cookbook/examples/extras/llm_json_schema.mdx +++ b/docs/cookbook/examples/extras/llm_json_schema.mdx @@ -26,7 +26,6 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/'); use Cognesy\Instructor\Enums\Mode;use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/docs/cookbook/examples/extras/llm_md_json.mdx b/docs/cookbook/examples/extras/llm_md_json.mdx index ab6f3558..28de98ee 100644 --- a/docs/cookbook/examples/extras/llm_md_json.mdx +++ b/docs/cookbook/examples/extras/llm_md_json.mdx @@ -28,7 +28,6 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/'); use Cognesy\Instructor\Enums\Mode;use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/docs/cookbook/examples/extras/llm_tools.mdx b/docs/cookbook/examples/extras/llm_tools.mdx index ba09bc95..63379d13 100644 --- a/docs/cookbook/examples/extras/llm_tools.mdx +++ b/docs/cookbook/examples/extras/llm_tools.mdx @@ -24,7 +24,6 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . 
'../../src/'); use Cognesy\Instructor\Enums\Mode; use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/docs/cookbook/examples/extras/schema.mdx b/docs/cookbook/examples/extras/schema.mdx index b2787b69..3470a91a 100644 --- a/docs/cookbook/examples/extras/schema.mdx +++ b/docs/cookbook/examples/extras/schema.mdx @@ -27,7 +27,6 @@ class City { $schema = (new SchemaFactory)->schema(City::class); -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/docs/mint.json b/docs/mint.json index c7a33ff2..a9058be9 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -202,6 +202,7 @@ "cookbook/examples/extras/llm_md_json", "cookbook/examples/extras/llm_tools", "cookbook/examples/extras/schema", + "cookbook/examples/extras/schema_dynamic", "cookbook/examples/extras/transcription_to_tasks", "cookbook/examples/extras/translate_ui_fields", "cookbook/examples/extras/web_to_objects" diff --git a/examples/A05_Extras/LLM/run.php b/examples/A05_Extras/LLM/run.php index 2286ffb1..86af3b9f 100644 --- a/examples/A05_Extras/LLM/run.php +++ b/examples/A05_Extras/LLM/run.php @@ -23,7 +23,9 @@ use Cognesy\Instructor\Extras\LLM\Inference; use Cognesy\Instructor\Utils\Str; -// simplified API, default connection for convenient ad-hoc calls + + +// EXAMPLE 1: simplified API, default connection for convenient ad-hoc calls $answer = Inference::text('What is capital of Germany'); echo "USER: What is capital of Germany\n"; @@ -31,7 +33,9 @@ assert(Str::contains($answer, 'Berlin')); -// regular API, allows to customize inference options + + +// EXAMPLE 2: regular API, allows to customize inference options $answer = (new Inference) ->withConnection('openai') // optional, default is set in /config/llm.php ->create( @@ -44,7 +48,10 @@ echo "ASSISTANT: $answer\n"; assert(Str::contains($answer, 'Paris')); -// regular API, 
allows to customize inference options + + + +// EXAMPLE 3: streaming response $stream = (new Inference) ->create( messages: [['role' => 'user', 'content' => 'Describe capital of Brasil']], diff --git a/examples/A05_Extras/LLMJson/run.php b/examples/A05_Extras/LLMJson/run.php index 01cb2f7b..cfda05cd 100644 --- a/examples/A05_Extras/LLMJson/run.php +++ b/examples/A05_Extras/LLMJson/run.php @@ -35,7 +35,6 @@ use Cognesy\Instructor\Enums\Mode; use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') // optional, default is set in /config/llm.php ->create( diff --git a/examples/A05_Extras/LLMJsonSchema/run.php b/examples/A05_Extras/LLMJsonSchema/run.php index 28281db3..af47c397 100644 --- a/examples/A05_Extras/LLMJsonSchema/run.php +++ b/examples/A05_Extras/LLMJsonSchema/run.php @@ -26,7 +26,6 @@ use Cognesy\Instructor\Enums\Mode;use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/examples/A05_Extras/LLMMdJson/run.php b/examples/A05_Extras/LLMMdJson/run.php index ab6f3558..28de98ee 100644 --- a/examples/A05_Extras/LLMMdJson/run.php +++ b/examples/A05_Extras/LLMMdJson/run.php @@ -28,7 +28,6 @@ use Cognesy\Instructor\Enums\Mode;use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/examples/A05_Extras/LLMTools/run.php b/examples/A05_Extras/LLMTools/run.php index ba09bc95..63379d13 100644 --- a/examples/A05_Extras/LLMTools/run.php +++ b/examples/A05_Extras/LLMTools/run.php @@ -24,7 +24,6 @@ use Cognesy\Instructor\Enums\Mode; use Cognesy\Instructor\Extras\LLM\Inference; -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/examples/A05_Extras/Schema/run.php 
b/examples/A05_Extras/Schema/run.php index b2787b69..3470a91a 100644 --- a/examples/A05_Extras/Schema/run.php +++ b/examples/A05_Extras/Schema/run.php @@ -27,7 +27,6 @@ class City { $schema = (new SchemaFactory)->schema(City::class); -// regular API, allows to customize inference options $data = (new Inference) ->withConnection('openai') ->create( diff --git a/examples/A05_Extras/SchemaDynamic/run.php b/examples/A05_Extras/SchemaDynamic/run.php new file mode 100644 index 00000000..7fb244c3 --- /dev/null +++ b/examples/A05_Extras/SchemaDynamic/run.php @@ -0,0 +1,60 @@ +--- +title: 'Generating JSON Schema of dynamic data models' +docname: 'schema_dynamic' +--- + +## Overview + +Instructor has built-in support for generating JSON Schema from +dynamic objects with `Structure` class. + +This is useful when the data model is built during runtime or defined +by your app users. + +`Structure` helps you flexibly design and modify data models that +can change with every request or user input and allows you to generate +JSON Schema for them. + +## Example + +```php +add('Cognesy\\Instructor\\', __DIR__ . '../../src/'); + +use Cognesy\Instructor\Enums\Mode; +use Cognesy\Instructor\Extras\LLM\Inference; +use Cognesy\Instructor\Extras\Structure\Field; +use Cognesy\Instructor\Extras\Structure\Structure; + +$city = Structure::define('city', [ + Field::string('name', 'City name')->required(), + Field::int('population', 'City population')->required(), + Field::int('founded', 'Founding year')->required(), +]); + +$data = (new Inference) + ->withConnection('openai') + ->create( + messages: [['role' => 'user', 'content' => 'What is capital of France? 
\ + Respond with JSON data.']], + responseFormat: [ + 'type' => 'json_schema', + 'description' => 'City data', + 'json_schema' => [ + 'name' => 'city_data', + 'schema' => $city->toJsonSchema(), + 'strict' => true, + ], + ], + options: ['max_tokens' => 64], + mode: Mode::JsonSchema, + ) + ->toJson(); + +echo "USER: What is capital of France\n"; +echo "ASSISTANT:\n"; +dump($data); + +?> +``` diff --git a/src/Extras/Structure/Traits/Field/HandlesOptionality.php b/src/Extras/Structure/Traits/Field/HandlesOptionality.php index 7faff1ae..d0621932 100644 --- a/src/Extras/Structure/Traits/Field/HandlesOptionality.php +++ b/src/Extras/Structure/Traits/Field/HandlesOptionality.php @@ -5,7 +5,7 @@ trait HandlesOptionality { private bool $required = true; - public function required(bool $isRequired = false) : self { + public function required(bool $isRequired = true) : self { $this->required = $isRequired; return $this; }