Skip to content

Commit b8b387e

Browse files
committed
Docs update
1 parent 290a655 commit b8b387e

File tree

698 files changed

+9737
-28741
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

698 files changed

+9737
-28741
lines changed

README.md

Lines changed: 99 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,89 @@ This monorepo contains a set of dev-friendly, framework agnostic components offe
1313
- Docs website (Mintlify) https://docs.instructorphp.com
1414
- Docs (GitHub Pages) https://cognesy.github.io/instructor-php/
1515

16+
## Overview of Capabilities
17+
18+
The library offers a set of small, focused building blocks.
19+
20+
### Structured output
21+
22+
Purpose: turn messy model output into typed PHP data.
23+
Benefit: you stop hand-parsing JSON or text before using LLM results.
24+
25+
```php
26+
use Cognesy\Instructor\StructuredOutput;
27+
28+
final class Person {
29+
public string $name;
30+
public int $age;
31+
}
32+
33+
$person = StructuredOutput::using('openai')
34+
->with(messages: 'Jason is 28 years old.', responseModel: Person::class)
35+
->get();
36+
```
37+
38+
Detailed docs: [packages/instructor/docs/](packages/instructor/docs/)
39+
40+
### Unified inference
41+
42+
Purpose: call different LLM providers through one API.
43+
Benefit: switch providers without rewriting request code.
44+
45+
```php
46+
use Cognesy\Polyglot\Inference\Inference;
47+
48+
$text = Inference::using('openai')
49+
->withMessages('Say hello in one sentence.')
50+
->get();
51+
```
52+
53+
Detailed docs: [packages/polyglot/docs/](packages/polyglot/docs/)
54+
55+
### Embeddings
56+
57+
Purpose: generate vectors through the same provider layer.
58+
Benefit: keep retrieval and inference in one stack.
59+
60+
```php
61+
use Cognesy\Polyglot\Embeddings\Embeddings;
62+
63+
$vectors = Embeddings::using('openai')
64+
->withInputs(['hello world'])
65+
->vectors();
66+
```
67+
68+
Detailed docs: [packages/polyglot/docs/](packages/polyglot/docs/)
69+
70+
### Agents SDK
71+
72+
Purpose: build tool-using agents as a simple loop over state.
73+
Benefit: add tools and control flow without inventing your own agent runtime first.
74+
75+
```php
76+
use Cognesy\Agents\AgentLoop;
77+
use Cognesy\Agents\Data\AgentState;
78+
79+
$result = AgentLoop::default()->execute(
80+
AgentState::empty()->withUserMessage('What is 2+2?')
81+
);
82+
```
83+
84+
Detailed docs: [packages/agents/docs/](packages/agents/docs/)
85+
86+
### Code agent bridges
87+
88+
Purpose: drive external coding agents like Codex, Claude Code, and OpenCode from PHP.
89+
Benefit: automate reviews, summaries, and coding workflows through one interface.
90+
91+
```php
92+
use Cognesy\AgentCtrl\AgentCtrl;
93+
94+
$response = AgentCtrl::codex()->execute('Summarize this repository.');
95+
```
96+
97+
Detailed docs: [packages/agent-ctrl/docs/](packages/agent-ctrl/docs/)
98+
1699

17100
## What is Instructor?
18101

@@ -149,6 +232,7 @@ Instructor validates results of LLM response against validation rules specified
149232
> For further details on available validation rules, check [Symfony Validation constraints](https://symfony.com/doc/current/validation.html#constraints).
150233
151234
```php
235+
use Cognesy\Instructor\StructuredOutput;
152236
use Symfony\Component\Validator\Constraints as Assert;
153237

154238
class Person {
@@ -226,8 +310,9 @@ Polyglot takes care of translation of familiar OpenAI chat completion API conven
226310
### Example (using sync API)
227311

228312
```php
229-
$answer = (new Inference)
230-
->using('openai') // specify LLM connection preset (defined in config)
313+
use Cognesy\Polyglot\Inference\Inference;
314+
315+
$answer = Inference::using('openai') // specify LLM connection preset (defined in config)
231316
->with(messages: 'What is capital of Germany')
232317
->get();
233318

@@ -237,8 +322,9 @@ echo $answer;
237322
### Example (using streaming API)
238323

239324
```php
240-
$stream = (new Inference)
241-
->using('anthropic') // specify LLM connection preset (defined in config)
325+
use Cognesy\Polyglot\Inference\Inference;
326+
327+
$stream = Inference::using('anthropic') // specify LLM connection preset (defined in config)
242328
->withMessages([['role' => 'user', 'content' => 'Describe capital of Brasil']])
243329
->withOptions(['max_tokens' => 256])
244330
->withStreaming()
@@ -253,20 +339,17 @@ foreach ($stream as $delta) {
253339
### Example (customize LLM connection)
254340

255341
```php
256-
$config = new LLMConfig(
257-
apiUrl : 'https://api.deepseek.com',
258-
apiKey : Env::get('DEEPSEEK_API_KEY'),
259-
endpoint: '/chat/completions',
260-
model: 'deepseek-chat',
261-
maxTokens: 128,
262-
driver: 'deepseek',
263-
);
264-
265-
$answer = (new Inference)
266-
->withConfig($config)
342+
use Cognesy\Polyglot\Inference\Config\LLMConfig;
343+
use Cognesy\Polyglot\Inference\Inference;
344+
345+
$answer = Inference::fromConfig(LLMConfig::fromArray([
346+
'driver' => 'deepseek',
347+
'apiUrl' => 'https://api.deepseek.com',
348+
'endpoint' => '/chat/completions',
349+
'model' => 'deepseek-chat',
350+
]))
267351
->withMessages([['role' => 'user', 'content' => 'What is the capital of France']])
268352
->withOptions(['max_tokens' => 64])
269-
->withStreaming()
270353
->get();
271354

272355
echo $answer;

builds/docs-build/cookbook/examples/A01_Basics/basic_use_mixin.mdx

Lines changed: 10 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,35 +1,12 @@
11
---
2-
title: 'Basic use via mixin'
2+
title: 'Basic use via StructuredOutput'
33
docname: 'basic_use_mixin'
44
id: '068c'
55
---
66
## Overview
77

8-
`HandlesSelfInference` is deprecated in 2.0.
9-
Prefer runtime-first usage via `StructuredOutput::using('openai')->with(...)->getObject()`.
10-
11-
Legacy mixin usage is still available and shown below for migration reference.
12-
13-
`infer()` method returns an instance of the class with the data extracted
14-
using the Instructor.
15-
16-
`infer()` method has following signature (you can also find it in the
17-
`CanSelfInfer` interface):
18-
19-
```php
20-
static public function infer(
21-
string|array $messages, // (required) The message(s) to infer data from
22-
string $prompt = '', // (optional) The prompt to use for inference
23-
array $examples = [], // (optional) Examples to include in the prompt
24-
string $model = '', // (optional) The model to use for inference (otherwise - use default)
25-
int $maxRetries = 2, // (optional) The number of retries in case of validation failure
26-
array $options = [], // (optional) Additional data to pass to the Instructor or LLM API
27-
Mode $mode = OutputMode::Tools, // (optional) The mode to use for inference
28-
?LLM $llm = null // (optional) LLM instance to use for inference
29-
) : static;
30-
```
31-
32-
Recommended replacement:
8+
Mixin-based inference was removed in 2.0.
9+
Use `StructuredOutput` directly:
3310

3411
```php
3512
use Cognesy\Instructor\StructuredOutput;
@@ -48,20 +25,19 @@ $user = StructuredOutput::using('openai')
4825
<?php
4926
require 'examples/boot.php';
5027

51-
use Cognesy\Instructor\Extras\Mixin\HandlesSelfInference;
52-
use Cognesy\Polyglot\Inference\LLMProvider;
28+
use Cognesy\Instructor\StructuredOutput;
5329

5430
class User {
55-
use HandlesSelfInference;
56-
5731
public int $age;
5832
public string $name;
5933
}
6034

61-
$user = User::infer(
62-
messages: "Jason is 25 years old and works as an engineer.",
63-
llm: LLMProvider::fromLLMConfig(ExampleConfig::llmPreset('openai')),
64-
);
35+
$user = StructuredOutput::using('openai')
36+
->with(
37+
messages: "Jason is 25 years old and works as an engineer.",
38+
responseModel: User::class,
39+
)
40+
->getObject();
6541

6642
dump($user);
6743

builds/docs-build/cookbook/examples/A01_Basics/constructor_parameters.mdx

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,6 @@ $text = <<<TEXT
5656
Jason is 25 years old.
5757
TEXT;
5858

59-
6059
$user = StructuredOutput::using('openai')
6160
->withMessages($text)
6261
->withResponseClass(UserWithConstructor::class)

builds/docs-build/cookbook/examples/A01_Basics/custom_validation.mdx

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@ offers you #[Assert\Callback] annotation to build fully customized validation lo
1616
require 'examples/boot.php';
1717

1818
use Cognesy\Instructor\StructuredOutput;
19+
use Cognesy\Instructor\StructuredOutputRuntime;
20+
use Cognesy\Polyglot\Inference\LLMProvider;
1921
use Symfony\Component\Validator\Constraints as Assert;
2022
use Symfony\Component\Validator\Context\ExecutionContextInterface;
2123

@@ -35,12 +37,14 @@ class UserDetails
3537
}
3638
}
3739

38-
$user = StructuredOutput::using('openai')
39-
->wiretap(fn($e) => $e->print())
40+
$runtime = StructuredOutputRuntime::fromProvider(LLMProvider::using('openai'))
41+
->withMaxRetries(2)
42+
->wiretap(fn($e) => $e->print());
43+
44+
$user = (new StructuredOutput($runtime))
4045
->with(
4146
messages: [['role' => 'user', 'content' => 'jason is 25 years old']],
4247
responseModel: UserDetails::class,
43-
maxRetries: 2
4448
)
4549
->get();
4650

builds/docs-build/cookbook/examples/A01_Basics/getters_and_setters.mdx

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ and default value to determine if property is required.
1919
require 'examples/boot.php';
2020

2121
use Cognesy\Instructor\StructuredOutput;
22+
use Cognesy\Instructor\StructuredOutputRuntime;
23+
use Cognesy\Polyglot\Inference\LLMProvider;
2224
use Cognesy\Schema\Attributes\Description;
2325

2426
class UserWithSetter
@@ -72,10 +74,12 @@ $text = <<<TEXT
7274
TEXT;
7375

7476

75-
$user = StructuredOutput::using('openai')
77+
$user = new StructuredOutput(
78+
StructuredOutputRuntime::fromProvider(LLMProvider::using('openai'))
79+
->withMaxRetries(2)
80+
)
7681
->withMessages($text)
7782
->withResponseClass(UserWithSetter::class)
78-
->withMaxRetries(2)
7983
->get();
8084

8185
dump($user);

builds/docs-build/cookbook/examples/A01_Basics/maybe.mdx

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,9 @@ require 'examples/boot.php';
1717

1818
use Cognesy\Instructor\Extras\Maybe\Maybe;
1919
use Cognesy\Instructor\StructuredOutput;
20-
use Cognesy\Polyglot\Inference\Enums\OutputMode;
20+
use Cognesy\Instructor\StructuredOutputRuntime;
21+
use Cognesy\Instructor\Enums\OutputMode;
22+
use Cognesy\Polyglot\Inference\LLMProvider;
2123

2224
class User
2325
{
@@ -29,11 +31,13 @@ class User
2931
$text = 'We have no information about our new developer.';
3032
echo "\nINPUT:\n$text\n";
3133

32-
$maybeUser = StructuredOutput::using('openai')->with(
34+
$maybeUser = (new StructuredOutput(
35+
StructuredOutputRuntime::fromProvider(LLMProvider::using('openai'))
36+
->withOutputMode(OutputMode::MdJson)
37+
))->with(
3338
messages: [['role' => 'user', 'content' => $text]],
3439
responseModel: Maybe::is(User::class),
3540
model: 'gpt-4o-mini',
36-
mode: OutputMode::MdJson,
3741
)->get();
3842

3943
echo "\nOUTPUT:\n";

builds/docs-build/cookbook/examples/A01_Basics/messages_api.mdx

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,11 @@ messages and their sequences.
1616
require 'examples/boot.php';
1717

1818
use Cognesy\Instructor\StructuredOutput;
19+
use Cognesy\Instructor\StructuredOutputRuntime;
1920
use Cognesy\Messages\Message;
2021
use Cognesy\Messages\Messages;
21-
use Cognesy\Polyglot\Inference\Enums\OutputMode;
22+
use Cognesy\Instructor\Enums\OutputMode;
23+
use Cognesy\Polyglot\Inference\LLMProvider;
2224
use Cognesy\Utils\Str;
2325

2426
class Code {
@@ -43,10 +45,12 @@ $lastMessageId = $messages->last()->id()->toString();
4345
print("Last message ID: {$lastMessageId}\n");
4446

4547
print("Extracting structured data using LLM...\n\n");
46-
$code = StructuredOutput::using('openai')
48+
$code = (new StructuredOutput(
49+
StructuredOutputRuntime::fromProvider(LLMProvider::using('openai'))
50+
->withOutputMode(OutputMode::MdJson)
51+
))
4752
->withMessages($messages)
4853
->withResponseModel(Code::class)
49-
->withOutputMode(OutputMode::MdJson)
5054
->get();
5155

5256
print("Extracted data:\n");

0 commit comments

Comments
 (0)