🚀 [feat] Integrate Amazon Bedrock support
Add support for Amazon Bedrock models, including:

- Implement AWS credentials retrieval for Bedrock
- Add Bedrock model initialization and handling
- Include Claude 3.5 Sonnet and Claude 3 (Opus, Sonnet, Haiku) models for Bedrock
- Adjust token limits for Bedrock models
- Update chat action to support model selection
- Add @ai-sdk/amazon-bedrock dependency

Key changes:
- app/lib/.server/llm/api-key.ts: Add getAWSCredentials function
- app/lib/.server/llm/constants.ts: Define MAX_TOKENS_BEDROCK
- app/lib/.server/llm/model.ts: Implement getBedrockModel function
- app/lib/.server/llm/stream-text.ts: Use Bedrock-specific token limit
- app/routes/api.chat.ts: Update to support model selection
- app/utils/constants.ts: Add Bedrock model options
- package.json: Add @ai-sdk/amazon-bedrock dependency
- pnpm-lock.yaml: Update with new dependencies
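
For reference, the Bedrock path reads its AWS credentials from the environment (see app/lib/.server/llm/api-key.ts below). A minimal sketch of the expected bindings, with BedrockEnv as an illustrative name only; how the values are supplied (wrangler vars, .env files, CI secrets) is not specified by this commit.

// Illustrative only: environment bindings read by getAWSCredentials.
interface BedrockEnv {
  AWS_ACCESS_KEY_ID: string;
  AWS_SECRET_ACCESS_KEY: string;
  AWS_REGION?: string; // optional; the code falls back to 'us-east-1'
}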
Sunwood-ai-labs committed Oct 17, 2024
1 parent 4f7a06f commit d86eaa4
Showing 8 changed files with 1,080 additions and 9 deletions.
9 changes: 9 additions & 0 deletions app/lib/.server/llm/api-key.ts
@@ -16,7 +16,16 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY;
case 'OpenRouter':
return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY;
// AWS credentials for Bedrock are retrieved separately via getAWSCredentials
default:
return "";
}
}

export function getAWSCredentials(cloudflareEnv: Env) {
return {
accessKeyId: env.AWS_ACCESS_KEY_ID || cloudflareEnv.AWS_ACCESS_KEY_ID,
secretAccessKey: env.AWS_SECRET_ACCESS_KEY || cloudflareEnv.AWS_SECRET_ACCESS_KEY,
region: env.AWS_REGION || cloudflareEnv.AWS_REGION || 'us-east-1', // default region
};
}
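
Not part of the diff — a minimal usage sketch showing how a caller might fail fast when the Bedrock credentials are absent; assertBedrockCredentials is a hypothetical helper name, and Env is the same Cloudflare environment type used by getAPIKey above.

import { getAWSCredentials } from '~/lib/.server/llm/api-key';

// Hypothetical helper: surface a clear error instead of letting the
// Bedrock request fail later with a less obvious message.
export function assertBedrockCredentials(cloudflareEnv: Env) {
  const credentials = getAWSCredentials(cloudflareEnv);

  if (!credentials.accessKeyId || !credentials.secretAccessKey) {
    throw new Error('Missing AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY for Bedrock');
  }

  return credentials;
}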
1 change: 1 addition & 0 deletions app/lib/.server/llm/constants.ts
@@ -1,5 +1,6 @@
// see https://docs.anthropic.com/en/docs/about-claude/models
export const MAX_TOKENS = 8192;
export const MAX_TOKENS_BEDROCK = 4096;

// limits the number of model responses that can be returned in a single request
export const MAX_RESPONSE_SEGMENTS = 2;
18 changes: 17 additions & 1 deletion app/lib/.server/llm/model.ts
@@ -1,8 +1,9 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
import { getAPIKey } from '~/lib/.server/llm/api-key';
import { getAPIKey, getAWSCredentials } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
import { ollama } from 'ollama-ai-provider';
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

@@ -43,7 +44,22 @@ export function getOpenRouterModel(apiKey: string, model: string) {
return openRouter.chat(model);
}

export function getBedrockModel(modelId: string, credentials: any) {
const bedrock = createAmazonBedrock({
region: credentials.region,
accessKeyId: credentials.accessKeyId,
secretAccessKey: credentials.secretAccessKey,
});

return bedrock(modelId);
}

export function getModel(provider: string, model: string, env: Env) {
if (provider === 'Bedrock') {
const credentials = getAWSCredentials(env);
return getBedrockModel(model, credentials);
}

const apiKey = getAPIKey(env, provider);

switch (provider) {
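As a usage sketch (assumed caller, not shown in this diff): with provider 'Bedrock', getModel bypasses the API-key lookup and builds the client from the AWS credentials instead.

import { getModel } from '~/lib/.server/llm/model';

// env is the Cloudflare Env binding available to the route handler.
const model = getModel('Bedrock', 'anthropic.claude-3-haiku-20240307-v1:0', env);
// The returned model can be handed to the `ai` package's streamText helper.
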
8 changes: 3 additions & 5 deletions app/lib/.server/llm/stream-text.ts
@@ -2,7 +2,7 @@
// Preventing TS checks with files presented in the video for a better presentation.
import { streamText as _streamText, convertToCoreMessages } from 'ai';
import { getModel } from '~/lib/.server/llm/model';
import { MAX_TOKENS } from './constants';
import { MAX_TOKENS, MAX_TOKENS_BEDROCK } from './constants';
import { getSystemPrompt } from './prompts';
import { MODEL_LIST, DEFAULT_MODEL, DEFAULT_PROVIDER } from '~/utils/constants';

@@ -52,14 +52,12 @@ export function streamText(messages: Messages, env: Env, options?: StreamingOpti
});

const provider = MODEL_LIST.find((model) => model.name === currentModel)?.provider || DEFAULT_PROVIDER;
const maxTokens = provider === 'Bedrock' ? MAX_TOKENS_BEDROCK : MAX_TOKENS;

return _streamText({
model: getModel(provider, currentModel, env),
system: getSystemPrompt(),
maxTokens: MAX_TOKENS,
// headers: {
// 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
// },
maxTokens,
messages: convertToCoreMessages(processedMessages),
...options,
});
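For clarity, the limit-selection logic added above, factored out as an illustrative helper (not in the commit): Bedrock models get the smaller 4096-token budget, every other provider keeps the original 8192.

import { MAX_TOKENS, MAX_TOKENS_BEDROCK } from './constants';
import { MODEL_LIST, DEFAULT_PROVIDER } from '~/utils/constants';

// Illustrative helper mirroring the change above.
function maxTokensFor(modelName: string): number {
  const provider = MODEL_LIST.find((m) => m.name === modelName)?.provider || DEFAULT_PROVIDER;
  return provider === 'Bedrock' ? MAX_TOKENS_BEDROCK : MAX_TOKENS;
}
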
4 changes: 2 additions & 2 deletions app/routes/api.chat.ts
@@ -11,7 +11,7 @@ export async function action(args: ActionFunctionArgs) {
}

async function chatAction({ context, request }: ActionFunctionArgs) {
const { messages } = await request.json<{ messages: Messages }>();
const { messages, selectedModel } = await request.json<{ messages: Messages; selectedModel?: string }>();

const stream = new SwitchableStream();

@@ -47,7 +47,7 @@ async function chatAction({ context, request }: ActionFunctionArgs) {
return new Response(stream.readable, {
status: 200,
headers: {
contentType: 'text/plain; charset=utf-8',
'Content-Type': 'text/plain; charset=utf-8',
},
});
} catch (error) {
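A hypothetical client-side call illustrating the newly accepted selectedModel field; only the messages and selectedModel keys come from the diff, the rest of the request shape is assumed.

// Hypothetical front-end sketch: pick a Bedrock model for a single chat request.
const response = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages,
    selectedModel: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
  }),
});
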
6 changes: 5 additions & 1 deletion app/utils/constants.ts
@@ -35,4 +35,8 @@ export const MODEL_LIST = [
{ name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
{ name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
{ name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
];
{ name: 'anthropic.claude-3-5-sonnet-20240620-v1:0', label: 'Claude 3.5 Sonnet (Bedrock)', provider: 'Bedrock' },
{ name: 'anthropic.claude-3-opus-20240229-v1:0', label: 'Claude 3 Opus (Bedrock)', provider: 'Bedrock' },
{ name: 'anthropic.claude-3-sonnet-20240229-v1:0', label: 'Claude 3 Sonnet (Bedrock)', provider: 'Bedrock' },
{ name: 'anthropic.claude-3-haiku-20240307-v1:0', label: 'Claude 3 Haiku (Bedrock)', provider: 'Bedrock' },
];
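
A small sketch of how a consumer (assumed, not part of the commit) might group the new entries for a provider-aware model picker.

import { MODEL_LIST } from '~/utils/constants';

// Bedrock entries only: Claude 3.5 Sonnet, Claude 3 Opus, Claude 3 Sonnet, Claude 3 Haiku.
const bedrockModels = MODEL_LIST.filter((model) => model.provider === 'Bedrock');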
1 change: 1 addition & 0 deletions package.json
@@ -23,6 +23,7 @@
"node": ">=18.18.0"
},
"dependencies": {
"@ai-sdk/amazon-bedrock": "^0.0.30",
"@ai-sdk/anthropic": "^0.0.39",
"@ai-sdk/openai": "^0.0.66",
"@codemirror/autocomplete": "^6.17.0",