/**
 * Multi-provider example for Simple AI Provider
 * Demonstrates how to use Claude, OpenAI, and Gemini providers
 */

import {
  createClaudeProvider,
  createOpenAIProvider,
  createGeminiProvider,
  createProvider,
  ProviderRegistry,
  AIProviderError,
  AIErrorType
} from '../src/index.js';

async function multiProviderExample() {
  console.log('=== Multi-Provider AI Example ===\n');

  // Get API keys from environment
  const claudeApiKey = process.env.ANTHROPIC_API_KEY || 'your-claude-api-key';
  const openaiApiKey = process.env.OPENAI_API_KEY || 'your-openai-api-key';
  const geminiApiKey = process.env.GOOGLE_AI_API_KEY || 'your-gemini-api-key';

  try {
    // Method 1: Using factory functions
    console.log('1. Creating providers using factory functions...');

    const claude = createClaudeProvider(claudeApiKey, {
      defaultModel: 'claude-3-5-haiku-20241022'
    });

    const openai = createOpenAIProvider(openaiApiKey, {
      defaultModel: 'gpt-3.5-turbo'
    });

    const gemini = createGeminiProvider(geminiApiKey, {
      defaultModel: 'gemini-1.5-flash'
    });

    console.log('✓ Providers created\n');

    // Method 2: Using generic createProvider function
    console.log('2. Creating providers using generic factory...');

    const claude2 = createProvider('claude', {
      apiKey: claudeApiKey,
      defaultModel: 'claude-3-5-haiku-20241022'
    });

    const openai2 = createProvider('openai', {
      apiKey: openaiApiKey,
      defaultModel: 'gpt-3.5-turbo'
    });

    const gemini2 = createProvider('gemini', {
      apiKey: geminiApiKey,
      defaultModel: 'gemini-1.5-flash'
    });

    console.log('✓ Generic providers created\n');

    // Method 3: Using Provider Registry
    console.log('3. Using Provider Registry...');
    console.log('Registered providers:', ProviderRegistry.getRegisteredProviders());

    const claudeFromRegistry = ProviderRegistry.create('claude', {
      apiKey: claudeApiKey,
      defaultModel: 'claude-3-5-haiku-20241022'
    });

    console.log('✓ Provider created from registry\n');
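
    // Note: claude2/openai2/gemini2 and claudeFromRegistry only illustrate the
    // alternative construction paths; the rest of the example reuses the Method 1 instances.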

    // Initialize providers
    console.log('4. Initializing providers...');
    await Promise.all([
      claude.initialize(),
      openai.initialize(),
      gemini.initialize()
    ]);
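    // Promise.all rejects as soon as any initialize() call fails; the error is
    // handled by the catch block at the bottom of this function.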
    console.log('✓ All providers initialized\n');

    // Compare provider information
    console.log('5. Provider Information:');
    console.log('Claude Info:', claude.getInfo());
    console.log('OpenAI Info:', openai.getInfo());
    console.log('Gemini Info:', gemini.getInfo());
    console.log();

    // Test the same prompt with all providers
    const testPrompt = 'Explain the concept of recursion in programming in one sentence.';

    console.log('6. Testing same prompt with all providers...');
    console.log(`Prompt: "${testPrompt}"\n`);

    // Claude response
    console.log('--- Claude Response ---');
    const claudeResponse = await claude.complete({
      messages: [
        { role: 'system', content: 'You are a concise programming tutor.' },
        { role: 'user', content: testPrompt }
      ],
      maxTokens: 100,
      temperature: 0.7
    });

    console.log('Response:', claudeResponse.content);
    console.log('Usage:', claudeResponse.usage);
    console.log('Model:', claudeResponse.model);
    console.log();

    // OpenAI response
    console.log('--- OpenAI Response ---');
    const openaiResponse = await openai.complete({
      messages: [
        { role: 'system', content: 'You are a concise programming tutor.' },
        { role: 'user', content: testPrompt }
      ],
      maxTokens: 100,
      temperature: 0.7
    });

    console.log('Response:', openaiResponse.content);
    console.log('Usage:', openaiResponse.usage);
    console.log('Model:', openaiResponse.model);
    console.log();

    // Gemini response
    console.log('--- Gemini Response ---');
    const geminiResponse = await gemini.complete({
      messages: [
        { role: 'system', content: 'You are a concise programming tutor.' },
        { role: 'user', content: testPrompt }
      ],
      maxTokens: 100,
      temperature: 0.7
    });

    console.log('Response:', geminiResponse.content);
    console.log('Usage:', geminiResponse.usage);
    console.log('Model:', geminiResponse.model);
    console.log();

    // Streaming comparison
    console.log('7. Streaming comparison...');
    console.log('Streaming from Claude:');

    // Each provider's stream() yields chunks of incremental text; the final chunk
    // is flagged with isComplete.
    for await (const chunk of claude.stream({
      messages: [{ role: 'user', content: 'Count from 1 to 5.' }],
      maxTokens: 50
    })) {
      if (!chunk.isComplete) {
        process.stdout.write(chunk.content);
      } else {
        console.log('\n✓ Claude streaming complete\n');
      }
    }

    console.log('Streaming from OpenAI:');

    for await (const chunk of openai.stream({
      messages: [{ role: 'user', content: 'Count from 1 to 5.' }],
      maxTokens: 50
    })) {
      if (!chunk.isComplete) {
        process.stdout.write(chunk.content);
      } else {
        console.log('\n✓ OpenAI streaming complete\n');
      }
    }

    console.log('Streaming from Gemini:');

    for await (const chunk of gemini.stream({
      messages: [{ role: 'user', content: 'Count from 1 to 5.' }],
      maxTokens: 50
    })) {
      if (!chunk.isComplete) {
        process.stdout.write(chunk.content);
      } else {
        console.log('\n✓ Gemini streaming complete\n');
      }
    }

    // Provider-specific features demo
    console.log('8. Provider-specific features...');

    // OpenAI with organization
    const openaiWithOrg = createOpenAIProvider(openaiApiKey, {
      defaultModel: 'gpt-3.5-turbo',
      organization: 'org-example', // OpenAI organization ID (placeholder value)
      timeout: 60000               // request timeout in milliseconds
    });
    console.log('✓ Created OpenAI provider with organization settings');

    // Claude with custom version
    const claudeCustom = createClaudeProvider(claudeApiKey, {
      defaultModel: 'claude-3-5-sonnet-20241022',
      version: '2023-06-01',       // Anthropic API version header
      maxRetries: 5
    });
    console.log('✓ Created Claude provider with custom settings');

    // Gemini with safety settings
    const geminiCustom = createGeminiProvider(geminiApiKey, {
      defaultModel: 'gemini-1.5-pro',
      safetySettings: [],          // no per-category safety overrides
      generationConfig: {
        temperature: 0.9,
        topP: 0.8,
        topK: 40
      }
    });
    console.log('✓ Created Gemini provider with safety and generation settings');
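    // Like the Method 2/3 instances earlier, these customized providers are shown for
    // illustration only; they are never initialized or called in this example.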

    console.log('\n🎉 Multi-provider example completed successfully!');

  } catch (error) {
    console.error('❌ Error in multi-provider example:');

    if (error instanceof AIProviderError) {
      console.error(`AI Provider Error (${error.type}):`, error.message);

      switch (error.type) {
        case AIErrorType.AUTHENTICATION:
          console.error('💡 Hint: Check your API keys in environment variables');
          console.error('   Set ANTHROPIC_API_KEY, OPENAI_API_KEY, and GOOGLE_AI_API_KEY');
          break;
        case AIErrorType.RATE_LIMIT:
          console.error('💡 Hint: You are being rate limited. Wait and try again.');
          break;
        case AIErrorType.INVALID_REQUEST:
          console.error('💡 Hint: Check your request parameters.');
          break;
        default:
          console.error('💡 Hint: An unexpected error occurred.');
      }
    } else {
      console.error('Unexpected error:', error);
    }
  }
}

// Provider comparison utility
async function compareProviders() {
  console.log('\n=== Provider Comparison ===');

  const providers = [
    { name: 'Claude', factory: () => createClaudeProvider('dummy-key') },
    { name: 'OpenAI', factory: () => createOpenAIProvider('dummy-key') },
    { name: 'Gemini', factory: () => createGeminiProvider('dummy-key') }
  ];
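  // The 'dummy-key' placeholders exist only so the providers can be constructed; they
  // are never initialized, and getInfo() should only read static capability metadata.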

  console.log('\nProvider Capabilities:');
  console.log('| Provider | Models | Context | Streaming | Vision | Functions | Multimodal |');
  console.log('|----------|--------|---------|-----------|--------|-----------|------------|');

  for (const { name, factory } of providers) {
    const provider = factory();
    const info = provider.getInfo();

    const contextStr = info.maxContextLength >= 1000000
      ? `${(info.maxContextLength / 1000000).toFixed(1)}M`
      : `${(info.maxContextLength / 1000).toFixed(0)}K`;

    // Parenthesize each ternary before padding so the ✓/✗ columns stay aligned.
    console.log(
      `| ${name.padEnd(8)} | ${info.models.length.toString().padEnd(6)} | ${contextStr.padEnd(7)} | ` +
      `${(info.supportsStreaming ? '✓' : '✗').padEnd(9)} | ${(info.capabilities?.vision ? '✓' : '✗').padEnd(6)} | ` +
      `${(info.capabilities?.functionCalling ? '✓' : '✗').padEnd(9)} | ${(info.capabilities?.multimodal ? '✓' : '✗').padEnd(10)} |`
    );
  }

  console.log();
}

// Feature comparison
async function featureComparison() {
  console.log('\n=== Feature Comparison ===');

  const features = [
    ['Provider', 'Context Window', 'Streaming', 'Vision', 'Function Calling', 'System Messages', 'Special Features'],
    ['Claude', '200K tokens', '✅', '✅', '✅', '✅ (separate)', 'Advanced reasoning'],
    ['OpenAI', '128K tokens', '✅', '✅', '✅', '✅ (inline)', 'JSON mode, plugins'],
    ['Gemini', '1M tokens', '✅', '✅', '✅', '✅ (separate)', 'Largest context, multimodal']
  ];

  for (const row of features) {
    console.log('| ' + row.map(cell => cell.padEnd(15)).join(' | ') + ' |');
    if (row[0] === 'Provider') {
      console.log('|' + ''.padEnd(row.length * 17 + row.length - 1, '-') + '|');
    }
  }

  console.log();
}

// Run the examples
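// (import.meta.main is supported by runtimes such as Bun and Deno; if your runtime
// does not provide it, swap in an equivalent "is this the entry module" check.)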
if (import.meta.main) {
  await multiProviderExample();
  await compareProviders();
  await featureComparison();
}