Skip to content
Draft
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .changeset/breezy-moles-fix.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
---
"@openai/agents-core": patch
"@openai/agents-openai": patch
---

feat: add session memory support — a `Session` interface for persisting conversation history across runs (fixes #272)
1 change: 1 addition & 0 deletions examples/memory/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
tmp/
216 changes: 216 additions & 0 deletions examples/memory/file.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,216 @@
import type { AgentInputItem, Session } from '@openai/agents';
import { protocol } from '@openai/agents';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { randomUUID } from 'node:crypto';

/**
 * Configuration options for {@link FileSession}.
 */
export type FileSessionOptions = {
  /**
   * Directory where session files are stored. Defaults to `./.agents-sessions`
   * (resolved against the current working directory).
   */
  dir?: string;
  /**
   * Optional pre-existing session id to bind to. When omitted, a new id is
   * generated lazily on first use.
   */
  sessionId?: string;
};

/**
* A simple filesystem-backed Session implementation that stores history as a JSON array.
*/
/**
 * A simple filesystem-backed Session implementation that stores the
 * conversation history as a JSON array in `<dir>/<sessionId>.json`.
 *
 * NOTE(review): every mutation reads and rewrites the entire file, so this is
 * not safe for concurrent writers sharing one session id — confirm acceptable
 * for example usage.
 */
export class FileSession implements Session {
  #dir: string;
  #sessionId?: string;

  constructor(options: FileSessionOptions = {}) {
    this.#dir = options.dir ?? path.resolve(process.cwd(), '.agents-sessions');
    this.#sessionId = options.sessionId;
  }

  /**
   * Get the current session id, creating one (and its backing file) if
   * necessary.
   */
  async getSessionId(): Promise<string> {
    if (!this.#sessionId) {
      // Compact, URL-safe-ish id without dashes.
      this.#sessionId = randomUUID().replace(/-/g, '').slice(0, 24);
    }
    await this.#ensureDir();
    // Ensure the backing file exists so later reads find a valid empty array.
    const file = this.#filePath(this.#sessionId);
    try {
      await fs.access(file);
    } catch {
      await fs.writeFile(file, '[]', 'utf8');
    }
    return this.#sessionId;
  }

  /**
   * Retrieve items from the conversation history.
   *
   * @param limit - When provided and non-negative, return at most this many of
   * the most recent items, in chronological order. A limit of `0` returns an
   * empty array; omitting it returns the full history.
   */
  async getItems(limit?: number): Promise<AgentInputItem[]> {
    const sessionId = await this.getSessionId();
    const items = await this.#readItems(sessionId);
    if (typeof limit === 'number' && limit >= 0) {
      // Bug fix: `items.slice(-0)` is `items.slice(0)` and would return the
      // ENTIRE history, so a limit of 0 must be special-cased to mean "none".
      return limit === 0 ? [] : items.slice(-limit);
    }
    return items;
  }

  /**
   * Append new items to the conversation history.
   */
  async addItems(items: AgentInputItem[]): Promise<void> {
    if (!items.length) return; // Nothing to persist.
    const sessionId = await this.getSessionId();
    const current = await this.#readItems(sessionId);
    await this.#writeItems(sessionId, current.concat(items));
  }

  /**
   * Remove and return the most recent item, if any.
   */
  async popItem(): Promise<AgentInputItem | undefined> {
    const sessionId = await this.getSessionId();
    const items = await this.#readItems(sessionId);
    if (items.length === 0) return undefined;
    const popped = items.pop();
    await this.#writeItems(sessionId, items);
    return popped;
  }

  /**
   * Delete the backing file and reset the session state so the next call to
   * {@link getSessionId} mints a fresh id.
   */
  async clearSession(): Promise<void> {
    if (!this.#sessionId) return; // Nothing to clear.
    const file = this.#filePath(this.#sessionId);
    try {
      await fs.unlink(file);
    } catch {
      // Ignore if already removed or inaccessible.
    }
    this.#sessionId = undefined;
  }

  // Internal helpers

  /** Create the storage directory if it does not exist yet. */
  async #ensureDir(): Promise<void> {
    await fs.mkdir(this.#dir, { recursive: true });
  }

  /** Path of the JSON file backing the given session id. */
  #filePath(sessionId: string): string {
    return path.join(this.#dir, `${sessionId}.json`);
  }

  /**
   * Read and validate the stored history. Entries that fail protocol
   * validation are silently dropped; a missing file yields an empty history.
   */
  async #readItems(sessionId: string): Promise<AgentInputItem[]> {
    const file = this.#filePath(sessionId);
    try {
      const data = await fs.readFile(file, 'utf8');
      const parsed: unknown = JSON.parse(data);
      if (!Array.isArray(parsed)) return [];
      // Validate and coerce items to the protocol shape where possible.
      const result: AgentInputItem[] = [];
      for (const raw of parsed) {
        const check = protocol.ModelItem.safeParse(raw);
        if (check.success) {
          result.push(check.data as AgentInputItem);
        }
        // Silently skip invalid entries.
      }
      return result;
    } catch (err: unknown) {
      // A missing file (or a path component that is not a directory) means an
      // empty history; anything else is surfaced to the caller.
      const code = (err as { code?: string } | null)?.code;
      if (code === 'ENOENT' || code === 'ENOTDIR') return [];
      throw err;
    }
  }

  /** Persist the full item list, pretty-printed for easy manual inspection. */
  async #writeItems(sessionId: string, items: AgentInputItem[]): Promise<void> {
    await this.#ensureDir();
    const file = this.#filePath(sessionId);
    await fs.writeFile(file, JSON.stringify(items, null, 2), 'utf8');
  }
}

import { Agent, run } from '@openai/agents';

async function main() {
const agent = new Agent({
name: 'Assistant',
instructions: 'You are a helpful assistant. be VERY concise.',
});

const session = new FileSession({ dir: './tmp/' });
let result = await run(
agent,
'What is the largest country in South America?',
{ session },
);
console.log(result.finalOutput); // e.g., Brazil

result = await run(agent, 'What is the capital of that country?', {
session,
});
console.log(result.finalOutput); // e.g., Brasilia
}

async function mainStream() {
const agent = new Agent({
name: 'Assistant',
instructions: 'You are a helpful assistant. be VERY concise.',
});

const session = new FileSession({ dir: './tmp/' });
let result = await run(
agent,
'What is the largest country in South America?',
{
stream: true,
session,
},
);

for await (const event of result) {
if (
event.type === 'raw_model_stream_event' &&
event.data.type === 'output_text_delta'
)
process.stdout.write(event.data.delta);
}
console.log();

result = await run(agent, 'What is the capital of that country?', {
stream: true,
session,
});

// toTextStream() automatically returns a readable stream of strings intended to be displayed
// to the user
for await (const event of result.toTextStream()) {
process.stdout.write(event);
}
console.log();
}

async function promptAndRun() {
const readline = await import('node:readline/promises');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
const isStream = await rl.question('Run in stream mode? (y/n): ');
rl.close();
if (isStream.trim().toLowerCase() === 'y') {
await mainStream();
} else {
await main();
}
}

if (require.main === module) {
promptAndRun().catch(console.error);
}
78 changes: 78 additions & 0 deletions examples/memory/oai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import { Agent, OpenAIConversationsSession, run } from '@openai/agents';

async function main() {
const agent = new Agent({
name: 'Assistant',
instructions: 'You are a helpful assistant. be VERY concise.',
});

const session = new OpenAIConversationsSession();
let result = await run(
agent,
'What is the largest country in South America?',
{ session },
);
console.log(result.finalOutput); // e.g., Brazil

result = await run(agent, 'What is the capital of that country?', {
session,
});
console.log(result.finalOutput); // e.g., Brasilia
}

async function mainStream() {
const agent = new Agent({
name: 'Assistant',
instructions: 'You are a helpful assistant. be VERY concise.',
});

const session = new OpenAIConversationsSession();
let result = await run(
agent,
'What is the largest country in South America?',
{
stream: true,
session,
},
);

for await (const event of result) {
if (
event.type === 'raw_model_stream_event' &&
event.data.type === 'output_text_delta'
)
process.stdout.write(event.data.delta);
}
console.log();

result = await run(agent, 'What is the capital of that country?', {
stream: true,
session,
});

// toTextStream() automatically returns a readable stream of strings intended to be displayed
// to the user
for await (const event of result.toTextStream()) {
process.stdout.write(event);
}
console.log();
}

async function promptAndRun() {
const readline = await import('node:readline/promises');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
const isStream = await rl.question('Run in stream mode? (y/n): ');
rl.close();
if (isStream.trim().toLowerCase() === 'y') {
await mainStream();
} else {
await main();
}
}

if (require.main === module) {
promptAndRun().catch(console.error);
}
12 changes: 12 additions & 0 deletions examples/memory/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"private": true,
"name": "memory",
"dependencies": {
"@openai/agents": "workspace:*"
},
"scripts": {
"build-check": "tsc --noEmit",
"start:oai": "tsx oai.ts",
"start:file": "tsx file.ts"
}
}
3 changes: 3 additions & 0 deletions examples/memory/tsconfig.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{
"extends": "../../tsconfig.examples.json"
}
1 change: 1 addition & 0 deletions packages/agents-core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ export type {
StreamEventGenericItem,
} from './types';
export { Usage } from './usage';
export type { Session } from './memory/session';

/**
* Exporting the whole protocol as an object here. This contains both the types
Expand Down
37 changes: 37 additions & 0 deletions packages/agents-core/src/memory/session.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import type { AgentInputItem } from '../types';

/**
* Interface representing a persistent session store for conversation history.
*/
/**
 * Interface representing a persistent session store for conversation history.
 *
 * Implementations decide where history lives (filesystem, database, hosted
 * service); callers interact only through this contract.
 */
export interface Session {
  /**
   * Ensure and return the identifier for this session. Implementations may
   * lazily create the session (and its id) on first call.
   */
  getSessionId(): Promise<string>;

  /**
   * Retrieve items from the conversation history.
   *
   * @param limit - The maximum number of items to return. When provided the most
   * recent {@link limit} items should be returned in chronological order.
   * When omitted, the full history is returned.
   */
  getItems(limit?: number): Promise<AgentInputItem[]>;

  /**
   * Append new items to the conversation history.
   *
   * @param items - Items to add to the session history, in order.
   */
  addItems(items: AgentInputItem[]): Promise<void>;

  /**
   * Remove and return the most recent item from the conversation history if it
   * exists; resolves to `undefined` when the history is empty.
   */
  popItem(): Promise<AgentInputItem | undefined>;

  /**
   * Remove all items that belong to the session and reset its state.
   */
  clearSession(): Promise<void>;
}
Loading
Loading