diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ef4141cd85..eaa99beb3c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -31,8 +31,11 @@ deployed using [CloudFlare Pages](https://pages.cloudflare.com/) and ### AI SDK Integration Bolt uses the [AI SDK](https://github.com/vercel/ai) to integrate with AI -models. At this time, Bolt supports using Anthropic's Claude Sonnet 3.5. -You can get an API key from the [Anthropic API Console](https://console.anthropic.com/) to use with Bolt. +models. Bolt supports the following LLM providers: + +- **Anthropic** (default): Claude Sonnet 3.5. Get an API key from the [Anthropic API Console](https://console.anthropic.com/). +- **MiniMax**: MiniMax-M2.7 (default) and MiniMax-M2.7-highspeed. Get an API key from the [MiniMax Platform](https://platform.minimax.io/). + Take a look at how [Bolt uses the AI SDK](https://github.com/stackblitz/bolt.new/tree/main/app/lib/.server/llm) ## Prerequisites @@ -56,10 +59,15 @@ git clone https://github.com/stackblitz/bolt.new.git pnpm install ``` -3. Create a `.env.local` file in the root directory and add your Anthropic API key: +3. 
Create a `.env.local` file in the root directory and add your API key: -``` +```bash +# Default: Anthropic (Claude) ANTHROPIC_API_KEY=XXX + +# To use MiniMax instead, set: +# DEFAULT_LLM_PROVIDER=minimax +# MINIMAX_API_KEY=XXX ``` Optionally, you can set the debug level: diff --git a/app/lib/.server/llm/api-key.spec.ts b/app/lib/.server/llm/api-key.spec.ts new file mode 100644 index 0000000000..fce061c2aa --- /dev/null +++ b/app/lib/.server/llm/api-key.spec.ts @@ -0,0 +1,128 @@ +import { describe, expect, it, beforeEach, afterEach } from 'vitest'; +import { env } from 'node:process'; +import { getAPIKey, getMiniMaxAPIKey, getLLMProvider, getProviderAPIKey } from './api-key'; + +describe('api-key', () => { + const savedAnthropicKey = env.ANTHROPIC_API_KEY; + const savedMiniMaxKey = env.MINIMAX_API_KEY; + const savedProvider = env.DEFAULT_LLM_PROVIDER; + + beforeEach(() => { + delete env.ANTHROPIC_API_KEY; + delete env.MINIMAX_API_KEY; + delete env.DEFAULT_LLM_PROVIDER; + }); + + afterEach(() => { + // Restore original values + if (savedAnthropicKey !== undefined) { + env.ANTHROPIC_API_KEY = savedAnthropicKey; + } else { + delete env.ANTHROPIC_API_KEY; + } + + if (savedMiniMaxKey !== undefined) { + env.MINIMAX_API_KEY = savedMiniMaxKey; + } else { + delete env.MINIMAX_API_KEY; + } + + if (savedProvider !== undefined) { + env.DEFAULT_LLM_PROVIDER = savedProvider; + } else { + delete env.DEFAULT_LLM_PROVIDER; + } + }); + + const createMockEnv = (overrides: Partial<Env> = {}): Env => ({ + ANTHROPIC_API_KEY: '', + ...overrides, + }); + + describe('getAPIKey', () => { + it('returns Anthropic API key from process.env', () => { + env.ANTHROPIC_API_KEY = 'env-anthropic-key'; + + expect(getAPIKey(createMockEnv())).toBe('env-anthropic-key'); + }); + + it('falls back to cloudflare env', () => { + expect(getAPIKey(createMockEnv({ ANTHROPIC_API_KEY: 'cf-key' }))).toBe('cf-key'); + }); + + it('prefers process.env over cloudflare env', () => { + env.ANTHROPIC_API_KEY = 'env-key'; + + 
expect(getAPIKey(createMockEnv({ ANTHROPIC_API_KEY: 'cf-key' }))).toBe('env-key'); + }); + }); + + describe('getMiniMaxAPIKey', () => { + it('returns MiniMax API key from process.env', () => { + env.MINIMAX_API_KEY = 'env-minimax-key'; + + expect(getMiniMaxAPIKey(createMockEnv())).toBe('env-minimax-key'); + }); + + it('falls back to cloudflare env', () => { + const cfEnv = createMockEnv() as Env & { MINIMAX_API_KEY: string }; + cfEnv.MINIMAX_API_KEY = 'cf-minimax-key'; + + expect(getMiniMaxAPIKey(cfEnv)).toBe('cf-minimax-key'); + }); + + it('returns empty string when not configured', () => { + expect(getMiniMaxAPIKey(createMockEnv())).toBe(''); + }); + }); + + describe('getLLMProvider', () => { + it('defaults to anthropic', () => { + expect(getLLMProvider(createMockEnv())).toBe('anthropic'); + }); + + it('returns minimax when configured via process.env', () => { + env.DEFAULT_LLM_PROVIDER = 'minimax'; + + expect(getLLMProvider(createMockEnv())).toBe('minimax'); + }); + + it('is case-insensitive', () => { + env.DEFAULT_LLM_PROVIDER = 'MiniMax'; + + expect(getLLMProvider(createMockEnv())).toBe('minimax'); + }); + + it('falls back to cloudflare env', () => { + const cfEnv = createMockEnv() as Env & { DEFAULT_LLM_PROVIDER: string }; + cfEnv.DEFAULT_LLM_PROVIDER = 'minimax'; + + expect(getLLMProvider(cfEnv)).toBe('minimax'); + }); + + it('returns anthropic for unknown providers', () => { + env.DEFAULT_LLM_PROVIDER = 'unknown-provider'; + + expect(getLLMProvider(createMockEnv())).toBe('anthropic'); + }); + }); + + describe('getProviderAPIKey', () => { + it('returns anthropic provider and key by default', () => { + env.ANTHROPIC_API_KEY = 'anthropic-key'; + + const result = getProviderAPIKey(createMockEnv()); + + expect(result).toEqual({ provider: 'anthropic', apiKey: 'anthropic-key' }); + }); + + it('returns minimax provider and key when configured', () => { + env.DEFAULT_LLM_PROVIDER = 'minimax'; + env.MINIMAX_API_KEY = 'minimax-key'; + + const result = 
getProviderAPIKey(createMockEnv()); + + expect(result).toEqual({ provider: 'minimax', apiKey: 'minimax-key' }); + }); + }); +}); diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts index 863f763673..62c71c19d2 100644 --- a/app/lib/.server/llm/api-key.ts +++ b/app/lib/.server/llm/api-key.ts @@ -1,4 +1,5 @@ import { env } from 'node:process'; +import type { LLMProvider } from './model'; export function getAPIKey(cloudflareEnv: Env) { /** @@ -7,3 +8,29 @@ export function getAPIKey(cloudflareEnv: Env) { */ return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY; } + +export function getMiniMaxAPIKey(cloudflareEnv: Env) { + return env.MINIMAX_API_KEY || cloudflareEnv.MINIMAX_API_KEY || ''; +} + +export function getLLMProvider(cloudflareEnv: Env): LLMProvider { + const provider = (env.DEFAULT_LLM_PROVIDER || cloudflareEnv.DEFAULT_LLM_PROVIDER || 'anthropic').toLowerCase(); + + if (provider === 'minimax') { + return 'minimax'; + } + + return 'anthropic'; +} + +export function getProviderAPIKey(cloudflareEnv: Env): { provider: LLMProvider; apiKey: string } { + const provider = getLLMProvider(cloudflareEnv); + + switch (provider) { + case 'minimax': + return { provider, apiKey: getMiniMaxAPIKey(cloudflareEnv) }; + case 'anthropic': + default: + return { provider: 'anthropic', apiKey: getAPIKey(cloudflareEnv) }; + } +} diff --git a/app/lib/.server/llm/minimax.e2e.spec.ts b/app/lib/.server/llm/minimax.e2e.spec.ts new file mode 100644 index 0000000000..b36fde3ee9 --- /dev/null +++ b/app/lib/.server/llm/minimax.e2e.spec.ts @@ -0,0 +1,110 @@ +import { describe, expect, it } from 'vitest'; +import { env } from 'node:process'; + +const API_KEY = env.MINIMAX_API_KEY; +const BASE_URL = env.MINIMAX_BASE_URL || 'https://api.minimax.io/v1'; + +describe.skipIf(!API_KEY)('MiniMax E2E', () => { + it( + 'completes basic chat with MiniMax-M2.7', + async () => { + const response = await fetch(`${BASE_URL}/chat/completions`, { + method: 'POST', + headers: { + 
'Content-Type': 'application/json', + Authorization: `Bearer ${API_KEY}`, + }, + body: JSON.stringify({ + model: 'MiniMax-M2.7', + messages: [{ role: 'user', content: 'Say "test passed"' }], + max_tokens: 20, + temperature: 1.0, + }), + }); + + expect(response.ok).toBe(true); + + const data = await response.json(); + + expect(data.choices).toBeDefined(); + expect(data.choices[0].message.content).toBeTruthy(); + }, + 30000, + ); + + it( + 'handles streaming response', + async () => { + const response = await fetch(`${BASE_URL}/chat/completions`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${API_KEY}`, + }, + body: JSON.stringify({ + model: 'MiniMax-M2.7', + messages: [{ role: 'user', content: 'Count 1 to 3' }], + max_tokens: 50, + stream: true, + temperature: 1.0, + }), + }); + + expect(response.ok).toBe(true); + + const reader = response.body!.getReader(); + const decoder = new TextDecoder(); + let chunks = 0; + let buffer = ''; + + while (true) { + const { done, value } = await reader.read(); + + if (done) { + break; + } + + buffer += decoder.decode(value, { stream: true }); + + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; + + for (const line of lines) { + if (line.startsWith('data:') && !line.includes('[DONE]')) { + chunks++; + } + } + } + + expect(chunks).toBeGreaterThan(1); + }, + 30000, + ); + + it( + 'works with MiniMax-M2.7-highspeed model', + async () => { + const response = await fetch(`${BASE_URL}/chat/completions`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${API_KEY}`, + }, + body: JSON.stringify({ + model: 'MiniMax-M2.7-highspeed', + messages: [{ role: 'user', content: 'Say "highspeed test passed"' }], + max_tokens: 20, + temperature: 1.0, + }), + }); + + expect(response.ok).toBe(true); + + const data = await response.json(); + + expect(data.choices).toBeDefined(); + expect(data.choices[0].message.content).toBeTruthy(); + }, + 
30000, + ); +}); diff --git a/app/lib/.server/llm/model.spec.ts b/app/lib/.server/llm/model.spec.ts new file mode 100644 index 0000000000..37763894b7 --- /dev/null +++ b/app/lib/.server/llm/model.spec.ts @@ -0,0 +1,106 @@ +import { describe, expect, it, vi } from 'vitest'; + +// Mock @ai-sdk/anthropic +vi.mock('@ai-sdk/anthropic', () => ({ + createAnthropic: vi.fn((config: { apiKey: string }) => { + return vi.fn((modelId: string) => ({ + provider: 'anthropic', + modelId, + apiKey: config.apiKey, + })); + }), +})); + +// Mock @ai-sdk/openai +vi.mock('@ai-sdk/openai', () => ({ + createOpenAI: vi.fn((config: { apiKey: string; baseURL: string }) => { + return vi.fn((modelId: string) => ({ + provider: 'openai-compatible', + modelId, + apiKey: config.apiKey, + baseURL: config.baseURL, + })); + }), +})); + +import { createAnthropic } from '@ai-sdk/anthropic'; +import { createOpenAI } from '@ai-sdk/openai'; +import { getAnthropicModel, getMiniMaxModel, getModel } from './model'; + +describe('model', () => { + describe('getAnthropicModel', () => { + it('creates Anthropic model with correct apiKey', () => { + const model = getAnthropicModel('test-anthropic-key'); + + expect(createAnthropic).toHaveBeenCalledWith({ apiKey: 'test-anthropic-key' }); + expect(model).toBeDefined(); + }); + + it('uses claude-3-5-sonnet-20240620 model', () => { + getAnthropicModel('test-key'); + + const mockFn = vi.mocked(createAnthropic).mock.results[0].value; + expect(mockFn).toHaveBeenCalledWith('claude-3-5-sonnet-20240620'); + }); + }); + + describe('getMiniMaxModel', () => { + it('creates MiniMax model with correct apiKey and default baseURL', () => { + const model = getMiniMaxModel('test-minimax-key'); + + expect(createOpenAI).toHaveBeenCalledWith({ + apiKey: 'test-minimax-key', + baseURL: 'https://api.minimax.io/v1', + }); + expect(model).toBeDefined(); + }); + + it('uses custom baseURL when provided', () => { + getMiniMaxModel('test-key', 'https://api.minimaxi.com/v1'); + + 
expect(createOpenAI).toHaveBeenCalledWith({ + apiKey: 'test-key', + baseURL: 'https://api.minimaxi.com/v1', + }); + }); + + it('uses MiniMax-M2.7 model', () => { + getMiniMaxModel('test-key'); + + const mockFn = vi.mocked(createOpenAI).mock.results[0].value; + expect(mockFn).toHaveBeenCalledWith('MiniMax-M2.7'); + }); + }); + + describe('getModel', () => { + it('returns Anthropic model for anthropic provider', () => { + const model = getModel('anthropic', 'test-key'); + + expect(createAnthropic).toHaveBeenCalled(); + expect(model).toBeDefined(); + }); + + it('returns MiniMax model for minimax provider', () => { + const model = getModel('minimax', 'test-key'); + + expect(createOpenAI).toHaveBeenCalled(); + expect(model).toBeDefined(); + }); + + it('defaults to Anthropic for unknown provider', () => { + const model = getModel('anthropic', 'test-key'); + + expect(createAnthropic).toHaveBeenCalled(); + expect(model).toBeDefined(); + }); + + it('passes baseURL for MiniMax', () => { + getModel('minimax', 'test-key', 'https://custom.api.com/v1'); + + expect(createOpenAI).toHaveBeenCalledWith({ + apiKey: 'test-key', + baseURL: 'https://custom.api.com/v1', + }); + }); + }); +}); diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts index f0d695c47f..da384b5a57 100644 --- a/app/lib/.server/llm/model.ts +++ b/app/lib/.server/llm/model.ts @@ -1,4 +1,7 @@ import { createAnthropic } from '@ai-sdk/anthropic'; +import { createOpenAI } from '@ai-sdk/openai'; + +export type LLMProvider = 'anthropic' | 'minimax'; export function getAnthropicModel(apiKey: string) { const anthropic = createAnthropic({ @@ -7,3 +10,22 @@ export function getAnthropicModel(apiKey: string) { return anthropic('claude-3-5-sonnet-20240620'); } + +export function getMiniMaxModel(apiKey: string, baseURL?: string) { + const openai = createOpenAI({ + apiKey, + baseURL: baseURL || 'https://api.minimax.io/v1', + }); + + return openai('MiniMax-M2.7'); +} + +export function getModel(provider: 
LLMProvider, apiKey: string, baseURL?: string) { + switch (provider) { + case 'minimax': + return getMiniMaxModel(apiKey, baseURL); + case 'anthropic': + default: + return getAnthropicModel(apiKey); + } +} diff --git a/app/lib/.server/llm/stream-text.ts b/app/lib/.server/llm/stream-text.ts index cf937fd00e..12d5f5af6e 100644 --- a/app/lib/.server/llm/stream-text.ts +++ b/app/lib/.server/llm/stream-text.ts @@ -1,6 +1,6 @@ import { streamText as _streamText, convertToCoreMessages } from 'ai'; -import { getAPIKey } from '~/lib/.server/llm/api-key'; -import { getAnthropicModel } from '~/lib/.server/llm/model'; +import { getProviderAPIKey } from '~/lib/.server/llm/api-key'; +import { getModel } from '~/lib/.server/llm/model'; import { MAX_TOKENS } from './constants'; import { getSystemPrompt } from './prompts'; @@ -22,13 +22,19 @@ export type Messages = Message[]; export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>; export function streamText(messages: Messages, env: Env, options?: StreamingOptions) { + const { provider, apiKey } = getProviderAPIKey(env); + return _streamText({ - model: getAnthropicModel(getAPIKey(env)), + model: getModel(provider, apiKey), system: getSystemPrompt(), maxTokens: MAX_TOKENS, - headers: { - 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15', - }, + ...(provider === 'anthropic' + ? 
{ + headers: { + 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15', + }, + } + : {}), messages: convertToCoreMessages(messages), ...options, }); diff --git a/package.json b/package.json index 5583455603..949707dea5 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ }, "dependencies": { "@ai-sdk/anthropic": "^0.0.39", + "@ai-sdk/openai": "^0.0.40", "@codemirror/autocomplete": "^6.17.0", "@codemirror/commands": "^6.6.0", "@codemirror/lang-cpp": "^6.0.2", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0896b714b7..74fb839db6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -14,6 +14,9 @@ importers: '@ai-sdk/anthropic': specifier: ^0.0.39 version: 0.0.39(zod@3.23.8) + '@ai-sdk/openai': + specifier: ^0.0.40 + version: 0.0.40(zod@3.23.8) '@codemirror/autocomplete': specifier: ^6.17.0 version: 6.17.0(@codemirror/language@6.10.2)(@codemirror/state@6.4.1)(@codemirror/view@6.28.4)(@lezer/common@1.2.1) @@ -237,6 +240,21 @@ packages: peerDependencies: zod: ^3.0.0 + '@ai-sdk/openai@0.0.40': + resolution: {integrity: sha512-9Iq1UaBHA5ZzNv6j3govuKGXrbrjuWvZIgWNJv4xzXlDMHu9P9hnqlBr/Aiay54WwCuTVNhTzAUTfFgnTs2kbQ==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + + '@ai-sdk/provider-utils@1.0.5': + resolution: {integrity: sha512-XfOawxk95X3S43arn2iQIFyWGMi0DTxsf9ETc6t7bh91RPWOOPYN1tsmS5MTKD33OGJeaDQ/gnVRzXUCRBrckQ==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + peerDependenciesMeta: + zod: + optional: true + '@ai-sdk/provider-utils@1.0.9': resolution: {integrity: sha512-yfdanjUiCJbtGoRGXrcrmXn0pTyDfRIeY6ozDG96D66f2wupZaZvAgKptUa3zDYXtUCQQvcNJ+tipBBfQD/UYA==} engines: {node: '>=18'} @@ -246,6 +264,10 @@ packages: zod: optional: true + '@ai-sdk/provider@0.0.14': + resolution: {integrity: sha512-gaQ5Y033nro9iX1YUjEDFDRhmMcEiCk56LJdIUbX5ozEiCNCfpiBpEqrjSp/Gp5RzBS2W0BVxfG7UGW6Ezcrzg==} + engines: {node: '>=18'} + '@ai-sdk/provider@0.0.17': resolution: {integrity: 
sha512-f9j+P5yYRkqKFHxvWae5FI0j6nqROPCoPnMkpc2hc2vC7vKjqzrxBJucD8rpSaUjqiBnY/QuRJ0QeV717Uz5tg==} engines: {node: '>=18'} @@ -5202,6 +5224,21 @@ snapshots: '@ai-sdk/provider-utils': 1.0.9(zod@3.23.8) zod: 3.23.8 + '@ai-sdk/openai@0.0.40(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.14 + '@ai-sdk/provider-utils': 1.0.5(zod@3.23.8) + zod: 3.23.8 + + '@ai-sdk/provider-utils@1.0.5(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.14 + eventsource-parser: 1.1.2 + nanoid: 3.3.6 + secure-json-parse: 2.7.0 + optionalDependencies: + zod: 3.23.8 + '@ai-sdk/provider-utils@1.0.9(zod@3.23.8)': dependencies: '@ai-sdk/provider': 0.0.17 @@ -5211,6 +5248,10 @@ snapshots: optionalDependencies: zod: 3.23.8 + '@ai-sdk/provider@0.0.14': + dependencies: + json-schema: 0.4.0 + '@ai-sdk/provider@0.0.17': dependencies: json-schema: 0.4.0 diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts index 606a4e521c..1be40e50a8 100644 --- a/worker-configuration.d.ts +++ b/worker-configuration.d.ts @@ -1,3 +1,5 @@ interface Env { ANTHROPIC_API_KEY: string; + MINIMAX_API_KEY?: string; + DEFAULT_LLM_PROVIDER?: string; }