Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Add Amazon Bedrock Claude support #264

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -43,5 +43,14 @@ OPENAI_LIKE_API_KEY=
# You only need this environment variable set if you want to use Mistral models
MISTRAL_API_KEY=



# You only need these environment variables set if you want to use Amazon Bedrock models
# Set AMAZON_BEDROCK_REGION to an AWS region where the Bedrock Claude models are enabled (e.g. us-east-1 or us-west-2)
AMAZON_BEDROCK_REGION=us-west-2
AMAZON_BEDROCK_ACCESS_KEY_ID=
AMAZON_BEDROCK_SECRET_ACCESS_KEY=


# Include this environment variable if you want more logging for debugging locally
VITE_LOG_LEVEL=debug
17 changes: 17 additions & 0 deletions app/lib/.server/llm/api-key.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,23 @@
// Preventing TS checks with files presented in the video for a better presentation.
import { env } from 'node:process';



// Connection settings required to talk to Amazon Bedrock.
interface AmazonBedrockCredentials {
  region: string;
  accessKeyId: string;
  secretAccessKey: string;
}

/**
 * Resolve Amazon Bedrock credentials for the current runtime.
 *
 * Values from the local process environment take precedence; the
 * Cloudflare environment binding is the fallback (used when deployed
 * or when previewing locally).
 *
 * NOTE(review): if a variable is defined in neither source, the
 * corresponding field is `undefined` at runtime despite the `string`
 * type — confirm downstream callers tolerate that.
 */
export function getAmazonBedrockCredentials(cloudflareEnv: Env): AmazonBedrockCredentials {
  const region = env.AMAZON_BEDROCK_REGION || cloudflareEnv.AMAZON_BEDROCK_REGION;
  const accessKeyId = env.AMAZON_BEDROCK_ACCESS_KEY_ID || cloudflareEnv.AMAZON_BEDROCK_ACCESS_KEY_ID;
  const secretAccessKey = env.AMAZON_BEDROCK_SECRET_ACCESS_KEY || cloudflareEnv.AMAZON_BEDROCK_SECRET_ACCESS_KEY;

  return { region, accessKeyId, secretAccessKey };
}

export function getAPIKey(cloudflareEnv: Env, provider: string) {
/**
* The `cloudflareEnv` is only used when deployed or when previewing locally.
Expand Down
24 changes: 20 additions & 4 deletions app/lib/.server/llm/model.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { getAPIKey, getBaseURL,getAmazonBedrockCredentials } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { mistral } from '@ai-sdk/mistral';
import { createMistral } from '@ai-sdk/mistral';
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';


export function getAnthropicModel(apiKey: string, model: string) {
const anthropic = createAnthropic({
Expand All @@ -16,6 +18,17 @@ export function getAnthropicModel(apiKey: string, model: string) {

return anthropic(model);
}

/**
 * Build an Amazon Bedrock language-model instance for the AI SDK.
 *
 * @param region          AWS region hosting the Bedrock endpoint.
 * @param accessKeyId     AWS access key id used to sign requests.
 * @param secretAccessKey AWS secret access key paired with the id.
 * @param model           Bedrock model id (e.g. "anthropic.claude-3-5-haiku-20241022-v1:0").
 * @returns The provider-bound model handle for the given model id.
 */
export function getBedrockModel(region: string, accessKeyId: string, secretAccessKey: string, model: string) {
  const bedrockProvider = createAmazonBedrock({
    region,
    accessKeyId,
    secretAccessKey,
  });

  return bedrockProvider(model);
}
export function getOpenAILikeModel(baseURL:string,apiKey: string, model: string) {
const openai = createOpenAI({
baseURL,
Expand Down Expand Up @@ -83,6 +96,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getModel(provider: string, model: string, env: Env) {
const apiKey = getAPIKey(env, provider);
const baseURL = getBaseURL(env, provider);
const amazonBedrockCredentials = getAmazonBedrockCredentials(env);

switch (provider) {
case 'Anthropic':
Expand All @@ -94,13 +108,15 @@ export function getModel(provider: string, model: string, env: Env) {
case 'OpenRouter':
return getOpenRouterModel(apiKey, model);
case 'Google':
return getGoogleModel(apiKey, model)
return getGoogleModel(apiKey, model);
case 'OpenAILike':
return getOpenAILikeModel(baseURL,apiKey, model);
return getOpenAILikeModel(baseURL, apiKey, model);
case 'Deepseek':
return getDeepseekModel(apiKey, model)
case 'Mistral':
return getMistralModel(apiKey, model);
return getMistralModel(apiKey, model);
case 'AmazonBedrock':
return getBedrockModel(amazonBedrockCredentials.region, amazonBedrockCredentials.accessKeyId, amazonBedrockCredentials.secretAccessKey, model);
default:
return getOllamaModel(baseURL, model);
}
Expand Down
4 changes: 2 additions & 2 deletions app/lib/.server/llm/stream-text.ts
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,11 @@ export function streamText(messages: Messages, env: Env, options?: StreamingOpti
});

const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;

const maxTokens = provider === 'AmazonBedrock' ? 4096 : MAX_TOKENS;
return _streamText({
model: getModel(provider, currentModel, env),
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
maxTokens: maxTokens,
// headers: {
// 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
// },
Expand Down
10 changes: 7 additions & 3 deletions app/utils/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@ export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_PROVIDER = 'Anthropic';
//export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
//export const DEFAULT_PROVIDER = 'Anthropic';
export const DEFAULT_MODEL = 'anthropic.claude-3-5-haiku-20241022-v1:0';
export const DEFAULT_PROVIDER = 'AmazonBedrock';

const staticModels: ModelInfo[] = [
{ name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
Expand Down Expand Up @@ -43,6 +45,9 @@ const staticModels: ModelInfo[] = [
{ name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
{ name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
{ name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' },
{ name: 'anthropic.claude-3-5-sonnet-20241022-v2:0', label: 'Claude 3.5 Sonnet v2', provider: 'AmazonBedrock' },
{ name: 'anthropic.claude-3-5-sonnet-20240620-v1:0', label: 'Claude 3.5 Sonnet v1', provider: 'AmazonBedrock' },
{ name: 'anthropic.claude-3-5-haiku-20241022-v1:0', label: 'Claude 3.5 Haiku', provider: 'AmazonBedrock' },
];

export let MODEL_LIST: ModelInfo[] = [...staticModels];
Expand All @@ -52,7 +57,6 @@ async function getOllamaModels(): Promise<ModelInfo[]> {
const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
const response = await fetch(`${base_url}/api/tags`);
const data = await response.json() as OllamaApiResponse;

return data.models.map((model: OllamaModel) => ({
name: model.name,
label: `${model.name} (${model.details.parameter_size})`,
Expand Down
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,11 @@
"node": ">=18.18.0"
},
"dependencies": {
"@ai-sdk/amazon-bedrock": "^0.0.36",
"@ai-sdk/anthropic": "^0.0.39",
"@ai-sdk/google": "^0.0.52",
"@ai-sdk/openai": "^0.0.66",
"@ai-sdk/mistral": "^0.0.43",
"@ai-sdk/openai": "^0.0.66",
"@codemirror/autocomplete": "^6.17.0",
"@codemirror/commands": "^6.6.0",
"@codemirror/lang-cpp": "^6.0.2",
Expand Down
Loading