simple-ai-provider/src/providers/claude.ts

/**
 * Claude AI Provider implementation using Anthropic's API
 * Provides integration with Claude models through a standardized interface
 */

import Anthropic from '@anthropic-ai/sdk';
import type {
  AIProviderConfig,
  CompletionParams,
  CompletionResponse,
  CompletionChunk,
  ProviderInfo,
  AIMessage
} from '../types/index.js';
import { BaseAIProvider } from './base.js';
import { AIProviderError, AIErrorType } from '../types/index.js';

/**
 * Configuration specific to Claude provider
 */
export interface ClaudeConfig extends AIProviderConfig {
  /** Default model to use if not specified in requests (default: claude-3-5-sonnet-20241022) */
  defaultModel?: string;
  /** Anthropic API version (default: 2023-06-01) */
  version?: string;
}
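
// Example (illustrative only): a minimal ClaudeConfig. The apiKey value is a
// placeholder; timeout and maxRetries are assumed to be optional fields declared
// on AIProviderConfig, matching how doInitialize passes them to the Anthropic client.
//
//   const config: ClaudeConfig = {
//     apiKey: process.env.ANTHROPIC_API_KEY ?? '',
//     defaultModel: 'claude-3-5-haiku-20241022',
//     version: '2023-06-01',
//     timeout: 30_000,
//     maxRetries: 2
//   };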

/**
 * Claude AI provider implementation
 */
export class ClaudeProvider extends BaseAIProvider {
  private client: Anthropic | null = null;
  private readonly defaultModel: string;
  private readonly version: string;

  constructor(config: ClaudeConfig) {
    super(config);
    this.defaultModel = config.defaultModel || 'claude-3-5-sonnet-20241022';
    this.version = config.version || '2023-06-01';
  }

  /**
   * Initialize the Claude provider by setting up the Anthropic client
   */
  protected async doInitialize(): Promise<void> {
    try {
      this.client = new Anthropic({
        apiKey: this.config.apiKey,
        baseURL: this.config.baseUrl,
        timeout: this.config.timeout,
        maxRetries: this.config.maxRetries,
        defaultHeaders: {
          'anthropic-version': this.version
        }
      });

      // Test the connection by making a simple request
      await this.validateConnection();
    } catch (error) {
      throw new AIProviderError(
        `Failed to initialize Claude provider: ${(error as Error).message}`,
        AIErrorType.AUTHENTICATION,
        undefined,
        error as Error
      );
    }
  }

  /**
   * Generate a completion using Claude
   */
  protected async doComplete(params: CompletionParams): Promise<CompletionResponse> {
    if (!this.client) {
      throw new AIProviderError('Client not initialized', AIErrorType.INVALID_REQUEST);
    }

    try {
      const { system, messages } = this.convertMessages(params.messages);

      const response = await this.client.messages.create({
        model: params.model || this.defaultModel,
        max_tokens: params.maxTokens || 1000,
        temperature: params.temperature ?? 0.7,
        top_p: params.topP,
        stop_sequences: params.stopSequences,
        system: system || undefined,
        messages: messages.map(msg => ({
          role: msg.role as 'user' | 'assistant',
          content: msg.content
        }))
      });

      return this.formatCompletionResponse(response);
    } catch (error) {
      throw this.handleAnthropicError(error as Error);
    }
  }

  /**
   * Generate a streaming completion using Claude
   */
  protected async *doStream(params: CompletionParams): AsyncIterable<CompletionChunk> {
    if (!this.client) {
      throw new AIProviderError('Client not initialized', AIErrorType.INVALID_REQUEST);
    }

    try {
      const { system, messages } = this.convertMessages(params.messages);

      const stream = await this.client.messages.create({
        model: params.model || this.defaultModel,
        max_tokens: params.maxTokens || 1000,
        temperature: params.temperature ?? 0.7,
        top_p: params.topP,
        stop_sequences: params.stopSequences,
        system: system || undefined,
        messages: messages.map(msg => ({
          role: msg.role as 'user' | 'assistant',
          content: msg.content
        })),
        stream: true
      });

      let messageId = '';
      let promptTokens = 0;

      for await (const chunk of stream) {
        if (chunk.type === 'message_start') {
          // message_start carries the message id and the prompt (input) token count
          messageId = chunk.message.id;
          promptTokens = chunk.message.usage.input_tokens || 0;
        } else if (chunk.type === 'content_block_delta' && chunk.delta.type === 'text_delta') {
          yield {
            content: chunk.delta.text,
            isComplete: false,
            id: messageId
          };
        } else if (chunk.type === 'message_delta' && chunk.usage) {
          // Final chunk: message_delta reports output tokens; combine them with
          // the input token count captured from message_start
          const completionTokens = chunk.usage.output_tokens || 0;
          yield {
            content: '',
            isComplete: true,
            id: messageId,
            usage: {
              promptTokens,
              completionTokens,
              totalTokens: promptTokens + completionTokens
            }
          };
        }
      }
    } catch (error) {
      throw this.handleAnthropicError(error as Error);
    }
  }

  /**
   * Get information about the Claude provider
   */
  public getInfo(): ProviderInfo {
    return {
      name: 'Claude',
      version: '1.0.0',
      models: [
        'claude-3-5-sonnet-20241022',
        'claude-3-5-haiku-20241022',
        'claude-3-opus-20240229',
        'claude-3-sonnet-20240229',
        'claude-3-haiku-20240307'
      ],
      maxContextLength: 200000, // Claude 3.5 Sonnet context length
      supportsStreaming: true,
      capabilities: {
        vision: true,
        functionCalling: true,
        systemMessages: true
      }
    };
  }

  /**
   * Validate the connection by making a simple request
   */
  private async validateConnection(): Promise<void> {
    if (!this.client) {
      throw new Error('Client not initialized');
    }

    try {
      // Make a minimal request to validate credentials
      await this.client.messages.create({
        model: this.defaultModel,
        max_tokens: 1,
        messages: [{ role: 'user', content: 'Hi' }]
      });
    } catch (error: any) {
      if (error.status === 401 || error.status === 403) {
        throw new AIProviderError(
          'Invalid API key. Please check your Anthropic API key.',
          AIErrorType.AUTHENTICATION,
          error.status
        );
      }
      // For other errors during validation, we let initialization proceed
      // as they might be temporary issues
    }
  }

  /**
   * Convert our generic message format to Claude's format
   * Claude requires system messages to be separate from the conversation
   */
  private convertMessages(messages: AIMessage[]): { system: string | null; messages: AIMessage[] } {
    let system: string | null = null;
    const conversationMessages: AIMessage[] = [];

    for (const message of messages) {
      if (message.role === 'system') {
        // Combine multiple system messages
        if (system) {
          system += '\n\n' + message.content;
        } else {
          system = message.content;
        }
      } else {
        conversationMessages.push(message);
      }
    }

    return { system, messages: conversationMessages };
  }
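
  // Illustrative example (not part of the implementation): given
  //   [
  //     { role: 'system', content: 'Be terse.' },
  //     { role: 'user', content: 'Hello' }
  //   ]
  // convertMessages returns
  //   { system: 'Be terse.', messages: [{ role: 'user', content: 'Hello' }] }
  // so the system text can be passed through Anthropic's top-level `system` field.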

  /**
   * Format Anthropic's response to our standard format
   */
  private formatCompletionResponse(response: any): CompletionResponse {
    const content = response.content
      .filter((block: any) => block.type === 'text')
      .map((block: any) => block.text)
      .join('');

    return {
      content,
      model: response.model,
      usage: {
        promptTokens: response.usage.input_tokens,
        completionTokens: response.usage.output_tokens,
        totalTokens: response.usage.input_tokens + response.usage.output_tokens
      },
      id: response.id,
      metadata: {
        stopReason: response.stop_reason,
        stopSequence: response.stop_sequence
      }
    };
  }

  /**
   * Handle Anthropic-specific errors and convert them to our standard format
   */
  private handleAnthropicError(error: any): AIProviderError {
    if (error instanceof AIProviderError) {
      return error;
    }

    const status = error.status || error.statusCode;
    const message = error.message || 'Unknown Anthropic API error';

    switch (status) {
      case 400:
        return new AIProviderError(
          `Invalid request: ${message}`,
          AIErrorType.INVALID_REQUEST,
          status,
          error
        );
      case 401:
        return new AIProviderError(
          'Authentication failed. Please check your Anthropic API key.',
          AIErrorType.AUTHENTICATION,
          status,
          error
        );
      case 403:
        return new AIProviderError(
          'Access forbidden. Please check your API key permissions.',
          AIErrorType.AUTHENTICATION,
          status,
          error
        );
      case 404:
        return new AIProviderError(
          'Model not found. Please check the model name.',
          AIErrorType.MODEL_NOT_FOUND,
          status,
          error
        );
      case 429:
        return new AIProviderError(
          'Rate limit exceeded. Please slow down your requests.',
          AIErrorType.RATE_LIMIT,
          status,
          error
        );
      case 500:
      case 502:
      case 503:
      case 504:
        return new AIProviderError(
          'Anthropic service temporarily unavailable. Please try again later.',
          AIErrorType.NETWORK,
          status,
          error
        );
      default:
        return new AIProviderError(
          `Anthropic API error: ${message}`,
          AIErrorType.UNKNOWN,
          status,
          error
        );
    }
  }
}
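
// Usage sketch (illustrative only). It assumes BaseAIProvider exposes public
// initialize(), complete(), and stream() wrappers around the protected
// doInitialize/doComplete/doStream hooks; those method names and the exact
// CompletionParams shape are assumptions, not guarantees of this codebase.
//
//   import { ClaudeProvider } from './providers/claude.js';
//
//   const provider = new ClaudeProvider({ apiKey: process.env.ANTHROPIC_API_KEY ?? '' });
//   await provider.initialize();
//
//   const response = await provider.complete({
//     messages: [
//       { role: 'system', content: 'You are a helpful assistant.' },
//       { role: 'user', content: 'Say hello in one sentence.' }
//     ],
//     maxTokens: 100
//   });
//   console.log(response.content);
//
//   for await (const chunk of provider.stream({
//     messages: [{ role: 'user', content: 'Stream a short greeting.' }]
//   })) {
//     if (!chunk.isComplete) process.stdout.write(chunk.content);
//   }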