# @quarry-systems/drift-ai-core

AI agent core functionality for Drift (Managed Cyclic Graph).
## Installation

```bash
npm install @quarry-systems/drift-ai-core

# Or install together with the Drift core and contracts packages:
npm install @quarry-systems/drift-ai-core @quarry-systems/drift-core @quarry-systems/drift-contracts
```
## Features
- ✅ LLM Integration: Helpers for working with language models
- ✅ Tool Calling: Define and execute tools for LLM function calling
- ✅ Schema Generation: Convert TypeScript types to JSON schemas
- ✅ Runtime Validation: Validate LLM outputs against schemas
- ✅ Streaming Support: Handle streaming LLM responses
- ✅ Message Formatting: Utilities for chat message construction
- ✅ Response Parsing: Extract structured data from LLM responses
- ✅ Error Handling: Robust error handling for AI operations
## Quick Start
### Basic LLM Node
```typescript
import { ManagedCyclicGraph } from '@quarry-systems/drift-core';
import { createLLMNode, formatMessages } from '@quarry-systems/drift-ai-core';

const graph = new ManagedCyclicGraph('ai-workflow')
  .node('chat', {
    label: 'Chat with LLM',
    execute: [async (ctx) => {
      const messages = formatMessages([
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: ctx.data.userQuery }
      ]);

      // Use your LLM adapter
      const response = await ctx.services.llm.chat(messages);

      return {
        ...ctx,
        data: {
          ...ctx.data,
          response: response.content
        }
      };
    }]
  })
  .build();
```
### Tool Calling
```typescript
import { defineTool, executeToolCall } from '@quarry-systems/drift-ai-core';

// Define tools
const weatherTool = defineTool({
  name: 'get_weather',
  description: 'Get current weather for a location',
  parameters: {
    type: 'object',
    properties: {
      location: { type: 'string', description: 'City name' },
      units: { type: 'string', enum: ['celsius', 'fahrenheit'] }
    },
    required: ['location']
  },
  handler: async ({ location, units = 'celsius' }) => {
    // Fetch weather data
    return { temp: 22, units, location };
  }
});

// Use in graph
const graph = new ManagedCyclicGraph('tool-calling')
  .node('llm-with-tools', {
    execute: [async (ctx) => {
      const response = await ctx.services.llm.chat(
        formatMessages([{ role: 'user', content: 'What\'s the weather in Paris?' }]),
        { tools: [weatherTool] }
      );

      // Execute tool calls
      if (response.toolCalls) {
        const results = await Promise.all(
          response.toolCalls.map(call => executeToolCall(call, [weatherTool]))
        );
        return { ...ctx, data: { ...ctx.data, toolResults: results } };
      }

      return ctx;
    }]
  })
  .build();
```
### Structured Output
```typescript
import { generateSchema, validateAgainstSchema } from '@quarry-systems/drift-ai-core';

// Define expected output structure
interface UserProfile {
  name: string;
  age: number;
  email: string;
  interests: string[];
}

// Generate JSON schema
const schema = generateSchema({
  type: 'object',
  properties: {
    name: { type: 'string' },
    age: { type: 'number', minimum: 0 },
    email: { type: 'string', format: 'email' },
    interests: { type: 'array', items: { type: 'string' } }
  },
  required: ['name', 'age', 'email']
});

// Request structured output from LLM
const response = await llm.chat(messages, {
  responseFormat: { type: 'json_schema', schema }
});

// Validate response
const validation = validateAgainstSchema(response.content, schema);
if (validation.valid) {
  const profile: UserProfile = validation.data;
  // Use typed data
}
```
## Core Utilities
### Message Formatting
```typescript
import { formatMessages, addSystemMessage, addUserMessage } from '@quarry-systems/drift-ai-core';

// Format message array
const messages = formatMessages([
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Hello!' },
  { role: 'assistant', content: 'Hi! How can I help?' },
  { role: 'user', content: 'Tell me about graphs.' }
]);

// Helper functions
const withSystem = addSystemMessage(messages, 'Be concise.');
const withUser = addUserMessage(messages, 'What is 2+2?');
```
### Response Parsing
```typescript
import { parseJSONResponse, extractCodeBlocks } from '@quarry-systems/drift-ai-core';

// Parse JSON from LLM response
const data = parseJSONResponse(response.content);

// Extract code blocks
const codeBlocks = extractCodeBlocks(response.content);
// Returns: [{ language: 'typescript', code: '...' }, ...]
```
### Streaming
```typescript
import { handleStreamingResponse } from '@quarry-systems/drift-ai-core';

const stream = await llm.chatStream(messages);

await handleStreamingResponse(stream, {
  onToken: (token) => console.log(token),
  onComplete: (fullText) => console.log('Complete:', fullText),
  onError: (error) => console.error('Error:', error)
});
```
### Tool Definition
```typescript
import { defineTool, executeToolCall, validateToolCall } from '@quarry-systems/drift-ai-core';

// Define a tool
const calculator = defineTool({
  name: 'calculate',
  description: 'Perform basic math operations',
  parameters: {
    type: 'object',
    properties: {
      operation: { type: 'string', enum: ['add', 'subtract', 'multiply', 'divide'] },
      a: { type: 'number' },
      b: { type: 'number' }
    },
    required: ['operation', 'a', 'b']
  },
  handler: async ({ operation, a, b }) => {
    switch (operation) {
      case 'add': return a + b;
      case 'subtract': return a - b;
      case 'multiply': return a * b;
      case 'divide': return a / b;
      default: throw new Error('Invalid operation');
    }
  }
});

// Validate tool call
const isValid = validateToolCall(toolCall, calculator);

// Execute tool call
const result = await executeToolCall(toolCall, [calculator]);
```
## Advanced Patterns
### Conversation Memory
```typescript
const graph = new ManagedCyclicGraph('conversation')
  .node('chat', {
    execute: [async (ctx) => {
      const history = ctx.data.messages || [];
      const newMessage = { role: 'user', content: ctx.data.userInput };

      const response = await ctx.services.llm.chat([...history, newMessage]);

      return {
        ...ctx,
        data: {
          ...ctx.data,
          messages: [
            ...history,
            newMessage,
            { role: 'assistant', content: response.content }
          ]
        }
      };
    }]
  })
  .build();
```
### Agent Loop
```typescript
import { defineTool, executeToolCall } from '@quarry-systems/drift-ai-core';

const graph = new ManagedCyclicGraph('agent')
  .node('think', {
    label: 'Agent Reasoning',
    execute: [async (ctx) => {
      const response = await ctx.services.llm.chat(
        ctx.data.messages,
        { tools: ctx.data.availableTools }
      );

      if (response.toolCalls) {
        // Execute tools and continue
        const results = await Promise.all(
          response.toolCalls.map(call =>
            executeToolCall(call, ctx.data.availableTools)
          )
        );
        return {
          ...ctx,
          data: {
            ...ctx.data,
            toolResults: results,
            shouldContinue: true
          }
        };
      }

      // Final answer
      return {
        ...ctx,
        data: {
          ...ctx.data,
          finalAnswer: response.content,
          shouldContinue: false
        }
      };
    }]
  })
  .edge('think', 'think', (ctx) => ctx.data.shouldContinue === true)
  .edge('think', 'end', (ctx) => ctx.data.shouldContinue === false)
  .build();
```
### Structured Extraction
```typescript
import { generateSchema, validateAgainstSchema } from '@quarry-systems/drift-ai-core';

const extractionSchema = generateSchema({
  type: 'object',
  properties: {
    entities: {
      type: 'array',
      items: {
        type: 'object',
        properties: {
          name: { type: 'string' },
          type: { type: 'string' },
          confidence: { type: 'number', minimum: 0, maximum: 1 }
        }
      }
    },
    sentiment: { type: 'string', enum: ['positive', 'negative', 'neutral'] }
  }
});

const graph = new ManagedCyclicGraph('extraction')
  .node('extract', {
    execute: [async (ctx) => {
      const response = await ctx.services.llm.chat(
        formatMessages([
          { role: 'system', content: 'Extract entities and sentiment from text.' },
          { role: 'user', content: ctx.data.text }
        ]),
        { responseFormat: { type: 'json_schema', schema: extractionSchema } }
      );

      const validation = validateAgainstSchema(response.content, extractionSchema);

      return {
        ...ctx,
        data: {
          ...ctx.data,
          extracted: validation.valid ? validation.data : null,
          error: validation.valid ? null : validation.error
        }
      };
    }]
  })
  .build();
```
## API Reference
### Messages

- `formatMessages(messages)` - Format message array
- `addSystemMessage(messages, content)` - Add system message
- `addUserMessage(messages, content)` - Add user message
- `addAssistantMessage(messages, content)` - Add assistant message
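These helpers can be chained to build a conversation incrementally. A minimal sketch, assuming each helper returns a new message array rather than mutating its input (as the earlier examples suggest):

```typescript
import {
  formatMessages,
  addSystemMessage,
  addUserMessage,
  addAssistantMessage
} from '@quarry-systems/drift-ai-core';

// Build a conversation step by step; each helper is assumed to
// return a fresh array instead of mutating `messages`.
let messages = formatMessages([
  { role: 'system', content: 'You are a terse math tutor.' }
]);
messages = addUserMessage(messages, 'What is the derivative of x^2?');
messages = addAssistantMessage(messages, 'It is 2x.');
messages = addUserMessage(messages, 'And of x^3?');
```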
### Tools

- `defineTool(config)` - Define a tool with schema and handler
- `executeToolCall(call, tools)` - Execute a tool call
- `validateToolCall(call, tool)` - Validate tool call parameters
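A minimal end-to-end sketch of the tool workflow. The shape of the `toolCall` object (a name plus arguments) is an assumption for illustration; the real structure comes from your LLM adapter's response:

```typescript
import { defineTool, validateToolCall, executeToolCall } from '@quarry-systems/drift-ai-core';

const echoTool = defineTool({
  name: 'echo',
  description: 'Echo the provided text back',
  parameters: {
    type: 'object',
    properties: { text: { type: 'string' } },
    required: ['text']
  },
  handler: async ({ text }) => ({ echoed: text })
});

// Hypothetical tool call as an adapter might surface it; the exact
// field names are an assumption for illustration.
const toolCall = { name: 'echo', arguments: { text: 'hello' } };

if (validateToolCall(toolCall, echoTool)) {
  const result = await executeToolCall(toolCall, [echoTool]);
  // result is whatever the handler returned, e.g. { echoed: 'hello' }
}
```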
### Schemas

- `generateSchema(definition)` - Generate JSON schema
- `validateAgainstSchema(data, schema)` - Validate data against schema
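A standalone sketch of the schema round trip, assuming the validation result follows the `{ valid, data, error }` shape used in the Quick Start example:

```typescript
import { generateSchema, validateAgainstSchema } from '@quarry-systems/drift-ai-core';

const taskSchema = generateSchema({
  type: 'object',
  properties: {
    title: { type: 'string' },
    done: { type: 'boolean' }
  },
  required: ['title']
});

// Validate a raw LLM reply (here a hard-coded JSON string).
const validation = validateAgainstSchema('{"title":"Write docs","done":false}', taskSchema);
if (validation.valid) {
  console.log('Task:', validation.data.title);
} else {
  console.error('Invalid output:', validation.error);
}
```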
### Response Handling

- `parseJSONResponse(text)` - Parse JSON from a response
- `extractCodeBlocks(text)` - Extract code blocks
- `handleStreamingResponse(stream, handlers)` - Handle a streaming response
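The response helpers compose with streaming. A short sketch that assumes an `llm` adapter and a `messages` array are already in scope, as in the Core Utilities examples above:

```typescript
import { extractCodeBlocks, handleStreamingResponse } from '@quarry-systems/drift-ai-core';

// Accumulate a streamed reply, then pull any fenced code out of it.
const stream = await llm.chatStream(messages);

await handleStreamingResponse(stream, {
  onToken: (token) => process.stdout.write(token),
  onComplete: (fullText) => {
    const blocks = extractCodeBlocks(fullText);
    console.log(`\nReceived ${blocks.length} code block(s)`);
  },
  onError: (error) => console.error('Stream failed:', error)
});
```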
### Validation

- `isValidToolCall(call)` - Check if a tool call is valid
- `isValidMessage(message)` - Check if a message is valid
- `isValidSchema(schema)` - Check if a schema is valid
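A defensive-filtering sketch; it assumes the guards return booleans, and `rawMessages` and `response` are placeholders for values produced elsewhere in your graph:

```typescript
import { isValidMessage, isValidToolCall } from '@quarry-systems/drift-ai-core';

// Drop malformed entries before sending a conversation to the LLM.
const safeMessages = rawMessages.filter((message) => isValidMessage(message));

// Only keep tool calls that pass validation before executing them.
const safeCalls = (response.toolCalls ?? []).filter((call) => isValidToolCall(call));
```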
## Integration with LLM Adapters
Drift AI Core works with any LLM adapter that implements the `LLMAdapter` interface from `@quarry-systems/drift-contracts`:
```typescript
import type { LLMAdapter } from '@quarry-systems/drift-contracts';

// Your custom adapter
const myLLMAdapter: LLMAdapter = {
  chat: async (messages, options) => {
    // Implementation
    return {
      content: 'Response',
      role: 'assistant',
      finishReason: 'stop'
    };
  },
  chatStream: async (messages, options) => {
    // Streaming implementation
  }
};

// Use with Drift
const manager = new Manager(graph, {
  services: {
    llm: { factory: () => myLLMAdapter }
  }
});
```