Quick Start
Integrate the Vercel AI SDK with Acontext for session persistence and task extraction.

Install the CLI first:

curl -fsSL https://install.acontext.io | sh
Create a project from the template:

acontext create my-ai-project --template-path "typescript/vercel-ai-basic"
Then run the agent with Acontext.
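The example below reads its API keys from the environment via dotenv. A minimal .env in the project root might look like this (placeholder values; use your own keys):

OPENAI_API_KEY=sk-...
ACONTEXT_API_KEY=sk-ac-...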
Complete example
import { generateText, tool } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';
import { z } from 'zod';
import { AcontextClient } from '@acontext/acontext';
import dotenv from 'dotenv';

dotenv.config();

const openaiProvider = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

const client = new AcontextClient({
  apiKey: process.env.ACONTEXT_API_KEY,
});

// If you're using self-hosted Acontext:
// const client = new AcontextClient({
//   baseUrl: "http://localhost:8029/api/v1",
//   apiKey: "sk-ac-your-root-api-bearer-token",
// });

const tools = {
  get_weather: tool({
    description: 'Returns weather info for the specified city.',
    inputSchema: z.object({
      city: z.string(),
    }),
    execute: async ({ city }) => `The weather in ${city} is sunny`,
  }),
};

async function main() {
  const session = await client.sessions.create();
  const model = openaiProvider('gpt-4o-mini');

  // User message
  const userMsg = { role: 'user', content: "What's the weather in Helsinki?" };
  await client.sessions.storeMessage(session.id, userMsg, { format: 'openai' });

  // Generate response
  const result = await generateText({
    model,
    messages: [userMsg],
    tools,
  });

  // Store response
  const assistantMsg = { role: 'assistant', content: result.text };
  await client.sessions.storeMessage(session.id, assistantMsg, { format: 'openai' });

  // Extract tasks
  await client.sessions.flush(session.id);
  const tasks = await client.sessions.getTasks(session.id);
  for (const task of tasks.items) {
    console.log(`Task: ${task.data.task_description} | Status: ${task.status}`);
  }
}

main().catch(console.error);
To resume a conversation later, load the stored messages back in OpenAI format and pass them to the next generateText call (sessionId is the ID of the session you created earlier):

const messages = await client.sessions.getMessages(sessionId, { format: 'openai' });
const conversation = messages.items;

conversation.push({ role: 'user', content: 'Continue' });

const result = await generateText({ model, messages: conversation, tools });
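If you also want the continued turn persisted, you can reuse the same storeMessage call from the complete example above. A minimal sketch (assuming sessionId still refers to that session):

// Persist the new user turn and the assistant reply so the stored session stays complete.
await client.sessions.storeMessage(sessionId, { role: 'user', content: 'Continue' }, { format: 'openai' });
await client.sessions.storeMessage(sessionId, { role: 'assistant', content: result.text }, { format: 'openai' });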