
Commit

Docs update
ddebowczyk committed Oct 15, 2024
1 parent 5a948ca commit f66ebc7
Showing 3 changed files with 18 additions and 21 deletions.
24 changes: 12 additions & 12 deletions docs/cookbook/examples/advanced/context_cache_llm.mdx
@@ -31,12 +31,12 @@ $loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/');
use Cognesy\Instructor\Features\LLM\Inference;
use Cognesy\Instructor\Utils\Str;

$content = file_get_contents(__DIR__ . '/../../../README.md');
$data = file_get_contents(__DIR__ . '/../../../README.md');

$inference = (new Inference)->withConnection('anthropic')->withCachedContext(
messages: [
['role' => 'user', 'content' => 'Here is content of README.md file'],
['role' => 'user', 'content' => $content],
['role' => 'user', 'content' => $data],
['role' => 'user', 'content' => 'Generate short, very domain specific pitch of the project described in README.md'],
['role' => 'assistant', 'content' => 'For whom do you want to generate the pitch?'],
],
@@ -49,13 +49,13 @@ $response = $inference->create(

print("----------------------------------------\n");
print("\n# Summary for CTO of lead gen vendor\n");
print(" ($response->cacheReadTokens tokens read from cache)\n\n");
print(" ({$response->usage()->cacheReadTokens} tokens read from cache)\n\n");
print("----------------------------------------\n");
print($response->content . "\n");
print($response->content() . "\n");

assert(!empty($response->content));
assert(Str::contains($response->content, 'Instructor'));
assert(Str::contains($response->content, 'lead', false));
assert(!empty($response->content()));
assert(Str::contains($response->content(), 'Instructor'));
assert(Str::contains($response->content(), 'lead', false));

$response2 = $inference->create(
messages: [['role' => 'user', 'content' => 'CIO of insurance company']],
@@ -64,13 +64,13 @@ $response2 = $inference->create(

print("----------------------------------------\n");
print("\n# Summary for CIO of insurance company\n");
print(" ($response2->cacheReadTokens tokens read from cache)\n\n");
print(" ({$response2->usage()->cacheReadTokens} tokens read from cache)\n\n");
print("----------------------------------------\n");
print($response2->content . "\n");
print($response2->content() . "\n");

assert(!empty($response2->content));
assert(Str::contains($response2->content, 'Instructor'));
assert(Str::contains($response2->content, 'insurance', false));
assert(!empty($response2->content()));
assert(Str::contains($response2->content(), 'Instructor'));
assert(Str::contains($response2->content(), 'insurance', false));
//assert($response2->cacheReadTokens > 0);
?>
```
1 change: 1 addition & 0 deletions docs/mint.json
@@ -247,6 +247,7 @@
"cookbook/prompting/misc/restate_instructions",
"cookbook/prompting/misc/rewrite_instructions",
"cookbook/prompting/misc/search_query_expansion",
"cookbook/prompting/misc/chain_of_summaries",
"cookbook/prompting/misc/component_reuse",
"cookbook/prompting/misc/component_reuse_cot"
]
14 changes: 5 additions & 9 deletions examples/B07_Misc/Summary/run.php
@@ -5,7 +5,7 @@

## Overview

This is an example of a simple summarization.
This is an example of a simple summarization with keyword extraction.

## Example

@@ -14,10 +14,8 @@
$loader = require 'vendor/autoload.php';
$loader->add('Cognesy\\Instructor\\', __DIR__ . '../../src/');

use Cognesy\Instructor\Enums\Mode;
use Cognesy\Instructor\Features\LLM\Inference;
use Cognesy\Instructor\Features\Schema\Attributes\Description;
use Cognesy\Instructor\Instructor;
use Cognesy\Instructor\Utils\Debug\Debug;

$report = <<<EOT
[2021-09-01]
@@ -44,19 +42,17 @@
EOT;

class Summary {
#[Description('Project summary, not longer than 3 sentences')]
public string $summary = '';
#[Description('5 most relevant keywords extracted from the summary')]
public array $keywords = [];
}

$summary = (new Instructor)
->withConnection('openai')
->withCachedContext(messages: $report)
->request(
input: $report,
responseModel: Summary::class,
prompt: 'Create a condensed, 2-3 sentence summary of the project status',
options: [
'max_tokens' => 256,
],
)
->get();

