Skip to content

Commit

Permalink
Refactorings and cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
ddebowczyk committed Sep 28, 2024
1 parent 5e29977 commit dbb2aa8
Show file tree
Hide file tree
Showing 247 changed files with 1,018 additions and 5,487 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ method with the connection name.
<?php
// ...
$user = (new Instructor)
->withClient('ollama')
->withConnection('ollama')
->respond(
messages: "His name is Jason and he is 28 years old.",
responseModel: Person::class,
Expand Down
4 changes: 0 additions & 4 deletions composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -67,11 +67,7 @@
"phpstan/phpdoc-parser": "^1.29",
"psr/log": "^3.0",
"psr/event-dispatcher": "^1.0",
"psr/container": "^2.0",
"ramsey/uuid": "^4.7",
"saloonphp/saloon": "^3.8",
"saloonphp/cache-plugin": "^3.0",
"symfony/http-client": "^6.4 || ^7.0",
"symfony/property-access": "^6.4 || ^7.0",
"symfony/property-info": "^6.4 || ^7.0",
"symfony/serializer": "^6.4 || ^7.0",
Expand Down
12 changes: 12 additions & 0 deletions config/debug.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
<?php
// Debug configuration for HTTP-level tracing of API calls.
// NOTE(review): key names suggest each flag toggles dumping of the named
// request/response part — confirm against the consumer of this config.
return [
    'http' => [
        'enabled' => false,           // master switch for HTTP debug output
        'trace' => false,             // presumably low-level connection/wire trace — verify
        'request_headers' => false,   // dump outgoing request headers
        'request_body' => true,       // dump outgoing request body
        'response_headers' => false,  // dump incoming response headers
        'response_body' => true,      // dump incoming response body
        'response_stream' => true,    // dump streamed response chunks (for streaming endpoints)
    ],
];
31 changes: 9 additions & 22 deletions config/embed.php
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
<?php
use Cognesy\Instructor\ApiClient\Enums\ClientType;

use Cognesy\Instructor\Extras\LLM\Enums\LLMProviderType;
use Cognesy\Instructor\Utils\Env;

return [
Expand All @@ -10,7 +11,7 @@
'defaultConnection' => 'openai',
'connections' => [
'azure' => [
'clientType' => ClientType::Azure->value,
'providerType' => LLMProviderType::Azure->value,
'apiUrl' => 'https://{resourceName}.openai.azure.com/openai/deployments/{deploymentId}',
'apiKey' => Env::get('AZURE_OPENAI_EMBED_API_KEY', ''),
'endpoint' => '/embeddings',
Expand All @@ -22,33 +23,27 @@
'defaultModel' => 'text-embedding-3-small',
'defaultDimensions' => 1536,
'maxInputs' => 16,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'cohere' => [
'clientType' => ClientType::Cohere->value,
'providerType' => LLMProviderType::Cohere->value,
'apiUrl' => 'https://api.cohere.ai/v1',
'apiKey' => Env::get('COHERE_API_KEY', ''),
'endpoint' => '/embed',
'defaultModel' => 'embed-multilingual-v3.0',
'defaultDimensions' => 1024,
'maxInputs' => 96,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'gemini' => [
'clientType' => ClientType::Gemini->value,
'providerType' => LLMProviderType::Gemini->value,
'apiUrl' => 'https://generativelanguage.googleapis.com/v1beta',
'apiKey' => Env::get('GEMINI_API_KEY', ''),
'endpoint' => '/{model}:batchEmbedContents',
'defaultModel' => 'models/text-embedding-004',
'defaultDimensions' => 768,
'maxInputs' => 100, // max 2048 tokens
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'jina' => [
'clientType' => ClientType::Jina->value,
'providerType' => LLMProviderType::Jina->value,
'apiUrl' => 'https://api.jina.ai/v1',
'apiKey' => Env::get('JINA_API_KEY', ''),
'endpoint' => '/embeddings',
Expand All @@ -58,33 +53,27 @@
'defaultModel' => 'jina-embeddings-v2-base-en',
'defaultDimensions' => 768,
'maxInputs' => 500, // max 8192 tokens
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'mistral' => [
'clientType' => ClientType::Mistral->value,
'providerType' => LLMProviderType::Mistral->value,
'apiUrl' => 'https://api.mistral.ai/v1',
'apiKey' => Env::get('MISTRAL_API_KEY', ''),
'endpoint' => '/embeddings',
'defaultModel' => 'mistral-embed',
'defaultDimensions' => 1024,
'maxInputs' => 512, // max 512 tokens
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'ollama' => [
'clientType' => ClientType::Ollama->value,
'providerType' => LLMProviderType::Ollama->value,
'apiUrl' => 'http://localhost:11434/v1',
'apiKey' => Env::get('OLLAMA_API_KEY', ''),
'endpoint' => '/embeddings',
'defaultModel' => 'nomic-embed-text',
'defaultDimensions' => 1024,
'maxInputs' => 512,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'openai' => [
'clientType' => ClientType::OpenAI->value,
'providerType' => LLMProviderType::OpenAI->value,
'apiUrl' => 'https://api.openai.com/v1',
'apiKey' => Env::get('OPENAI_API_KEY', ''),
'endpoint' => '/embeddings',
Expand All @@ -94,8 +83,6 @@
'defaultModel' => 'text-embedding-3-small',
'defaultDimensions' => 1536,
'maxInputs' => 2048, // max 8192 tokens
'connectTimeout' => 3,
'requestTimeout' => 30,
],
],
];
21 changes: 21 additions & 0 deletions config/http.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<?php

use Cognesy\Instructor\Extras\Enums\HttpClientType;

// HTTP client configuration: selects the client driver and shared cache settings.
return [
    // Name of the entry under 'clients' used when no client is specified explicitly.
    'defaultClient' => 'guzzle',

    // Response cache settings — presumably applied by the HTTP layer; confirm against consumer.
    'cache' => [
        'enabled' => false,
        'expiryInSeconds' => 3600,         // cache entry TTL
        'path' => '/tmp/instructor/cache', // filesystem location of cached entries
    ],

    // Per-client driver configuration, keyed by client name.
    'clients' => [
        'guzzle' => [
            'httpClientType' => HttpClientType::Guzzle->value,
            'connectTimeout' => 3,   // presumably seconds to establish a connection — verify units
            'requestTimeout' => 30,  // presumably seconds for the full request — verify units
        ],
    ], // FIX: added trailing comma — consistent with 'cache' above and PER-CS multi-line array style
];
76 changes: 14 additions & 62 deletions config/llm.php
Original file line number Diff line number Diff line change
@@ -1,31 +1,15 @@
<?php
use Cognesy\Instructor\ApiClient\Enums\ClientType;
use Cognesy\Instructor\Extras\Enums\HttpClientType;

use Cognesy\Instructor\Extras\LLM\Enums\LLMProviderType;
use Cognesy\Instructor\Utils\Env;

return [
'useObjectReferences' => false,

'debug' => [
'enabled' => false,
'http_trace' => false,
'request_body' => true,
'request_headers' => false,
'response_body' => true,
'response_headers' => false,
],

'cache' => [
'enabled' => false,
'expiryInSeconds' => 3600,
'path' => '/tmp/instructor/cache',
],

'defaultConnection' => 'openai',

'connections' => [
'anthropic' => [
'clientType' => ClientType::Anthropic->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Anthropic->value,
'apiUrl' => 'https://api.anthropic.com/v1',
'apiKey' => Env::get('ANTHROPIC_API_KEY', ''),
'endpoint' => '/messages',
Expand All @@ -35,12 +19,9 @@
],
'defaultModel' => 'claude-3-haiku-20240307',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'azure' => [
'clientType' => ClientType::Azure->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Azure->value,
'apiUrl' => 'https://{resourceName}.openai.azure.com/openai/deployments/{deploymentId}',
'apiKey' => Env::get('AZURE_OPENAI_API_KEY', ''),
'endpoint' => '/chat/completions',
Expand All @@ -51,78 +32,57 @@
],
'defaultModel' => 'gpt-4o-mini',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'cohere' => [
'clientType' => ClientType::Cohere->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Cohere->value,
'apiUrl' => 'https://api.cohere.ai/v1',
'apiKey' => Env::get('COHERE_API_KEY', ''),
'endpoint' => '/chat',
'defaultModel' => 'command-r-plus-08-2024',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'fireworks' => [
'clientType' => ClientType::Fireworks->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Fireworks->value,
'apiUrl' => 'https://api.fireworks.ai/inference/v1',
'apiKey' => Env::get('FIREWORKS_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'accounts/fireworks/models/mixtral-8x7b-instruct',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'gemini' => [
'clientType' => ClientType::Gemini->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Gemini->value,
'apiUrl' => 'https://generativelanguage.googleapis.com/v1beta',
'apiKey' => Env::get('GEMINI_API_KEY', ''),
'endpoint' => '/models/{model}:generateContent',
'defaultModel' => 'gemini-1.5-flash-latest',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'groq' => [
'clientType' => ClientType::Groq->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Groq->value,
'apiUrl' => 'https://api.groq.com/openai/v1',
'apiKey' => Env::get('GROQ_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'llama3-groq-8b-8192-tool-use-preview', // 'gemma2-9b-it',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'mistral' => [
'clientType' => ClientType::Mistral->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Mistral->value,
'apiUrl' => 'https://api.mistral.ai/v1',
'apiKey' => Env::get('MISTRAL_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'mistral-small-latest',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'ollama' => [
'clientType' => ClientType::Ollama->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Ollama->value,
'apiUrl' => 'http://localhost:11434/v1',
'apiKey' => Env::get('OLLAMA_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'llama3.2:3b', //'gemma2:2b',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 60,
],
'openai' => [
'clientType' => ClientType::OpenAI->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::OpenAI->value,
'apiUrl' => 'https://api.openai.com/v1',
'apiKey' => Env::get('OPENAI_API_KEY', ''),
'endpoint' => '/chat/completions',
Expand All @@ -131,30 +91,22 @@
],
'defaultModel' => 'gpt-4o-mini',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'openrouter' => [
'clientType' => ClientType::OpenRouter->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::OpenRouter->value,
'apiUrl' => 'https://openrouter.ai/api/v1/',
'apiKey' => Env::get('OPENROUTER_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'qwen/qwen-2.5-72b-instruct', //'microsoft/phi-3.5-mini-128k-instruct',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
'together' => [
'clientType' => ClientType::Together->value,
'httpClient' => HttpClientType::Guzzle->value,
'providerType' => LLMProviderType::Together->value,
'apiUrl' => 'https://api.together.xyz/v1',
'apiKey' => Env::get('TOGETHER_API_KEY', ''),
'endpoint' => '/chat/completions',
'defaultModel' => 'mistralai/Mixtral-8x7B-Instruct-v0.1',
'defaultMaxTokens' => 1024,
'connectTimeout' => 3,
'requestTimeout' => 30,
],
],
];
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/anthropic.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('anthropic');
$instructor = (new Instructor)->withConnection('anthropic');

$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/azure_openai.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('azure');
$instructor = (new Instructor)->withConnection('azure');

// Call with your model name and preferred execution mode
$user = $instructor->respond(
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/cohere.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('cohere');
$instructor = (new Instructor)->withConnection('cohere');

$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/fireworks.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('fireworks');
$instructor = (new Instructor)->withConnection('fireworks');

$user = $instructor
->respond(
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/google_gemini.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('gemini');
$instructor = (new Instructor)->withConnection('gemini');

$user = $instructor
->respond(
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/groq.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('groq');
$instructor = (new Instructor)->withConnection('groq');

$user = $instructor
->respond(
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/mistralai.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('mistral');
$instructor = (new Instructor)->withConnection('mistral');

$user = $instructor
->respond(
Expand Down
2 changes: 1 addition & 1 deletion docs/cookbook/examples/api_support/ollama.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class User {

// Get Instructor with specified LLM client connection
// See: /config/llm.php to check or change LLM client connection configuration details
$instructor = (new Instructor)->withClient('ollama');
$instructor = (new Instructor)->withConnection('ollama');

$user = $instructor->respond(
messages: "Jason (@jxnlco) is 25 years old and is the admin of this project. He likes playing football and reading books.",
Expand Down
Loading

0 comments on commit dbb2aa8

Please sign in to comment.