Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 12 additions & 4 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,11 @@ deployed using [CloudFlare Pages](https://pages.cloudflare.com/) and
### AI SDK Integration

Bolt uses the [AI SDK](https://github.com/vercel/ai) to integrate with AI
models. At this time, Bolt supports using Anthropic's Claude Sonnet 3.5.
You can get an API key from the [Anthropic API Console](https://console.anthropic.com/) to use with Bolt.
models. Bolt supports the following LLM providers:

- **Anthropic** (default): Claude 3.5 Sonnet. Get an API key from the [Anthropic API Console](https://console.anthropic.com/).
- **MiniMax**: MiniMax-M2.7 (default) and MiniMax-M2.7-highspeed. Get an API key from the [MiniMax Platform](https://platform.minimax.io/).

Take a look at how [Bolt uses the AI SDK](https://github.com/stackblitz/bolt.new/tree/main/app/lib/.server/llm).

## Prerequisites
Expand All @@ -56,10 +59,15 @@ git clone https://github.com/stackblitz/bolt.new.git
pnpm install
```

3. Create a `.env.local` file in the root directory and add your Anthropic API key:
3. Create a `.env.local` file in the root directory and add your API key:

```
```bash
# Default: Anthropic (Claude)
ANTHROPIC_API_KEY=XXX

# To use MiniMax instead, set:
# DEFAULT_LLM_PROVIDER=minimax
# MINIMAX_API_KEY=XXX
```

Optionally, you can set the debug level:
Expand Down
128 changes: 128 additions & 0 deletions app/lib/.server/llm/api-key.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
import { describe, expect, it, beforeEach, afterEach } from 'vitest';
import { env } from 'node:process';
import { getAPIKey, getMiniMaxAPIKey, getLLMProvider, getProviderAPIKey } from './api-key';

// Unit tests for the provider / API-key resolution helpers.
//
// The helpers under test read from BOTH Node's process.env and the
// Cloudflare `Env` binding, so this suite snapshots the relevant
// process.env entries once and restores them after every test.
describe('api-key', () => {
  // Snapshot taken when the suite is registered, before any test mutates env.
  const savedAnthropicKey = env.ANTHROPIC_API_KEY;
  const savedMiniMaxKey = env.MINIMAX_API_KEY;
  const savedProvider = env.DEFAULT_LLM_PROVIDER;

  beforeEach(() => {
    // Start each test from a clean slate: no provider-related vars set,
    // so only values injected by the test itself are visible.
    delete env.ANTHROPIC_API_KEY;
    delete env.MINIMAX_API_KEY;
    delete env.DEFAULT_LLM_PROVIDER;
  });

  afterEach(() => {
    // Restore original values
    if (savedAnthropicKey !== undefined) {
      env.ANTHROPIC_API_KEY = savedAnthropicKey;
    } else {
      delete env.ANTHROPIC_API_KEY;
    }

    if (savedMiniMaxKey !== undefined) {
      env.MINIMAX_API_KEY = savedMiniMaxKey;
    } else {
      delete env.MINIMAX_API_KEY;
    }

    if (savedProvider !== undefined) {
      env.DEFAULT_LLM_PROVIDER = savedProvider;
    } else {
      delete env.DEFAULT_LLM_PROVIDER;
    }
  });

  // Minimal Cloudflare-style Env stub; override individual fields per test.
  const createMockEnv = (overrides: Partial<Env> = {}): Env => ({
    ANTHROPIC_API_KEY: '',
    ...overrides,
  });

  describe('getAPIKey', () => {
    it('returns Anthropic API key from process.env', () => {
      env.ANTHROPIC_API_KEY = 'env-anthropic-key';

      expect(getAPIKey(createMockEnv())).toBe('env-anthropic-key');
    });

    it('falls back to cloudflare env', () => {
      // process.env key was cleared in beforeEach, so the binding wins.
      expect(getAPIKey(createMockEnv({ ANTHROPIC_API_KEY: 'cf-key' }))).toBe('cf-key');
    });

    it('prefers process.env over cloudflare env', () => {
      env.ANTHROPIC_API_KEY = 'env-key';

      expect(getAPIKey(createMockEnv({ ANTHROPIC_API_KEY: 'cf-key' }))).toBe('env-key');
    });
  });

  describe('getMiniMaxAPIKey', () => {
    it('returns MiniMax API key from process.env', () => {
      env.MINIMAX_API_KEY = 'env-minimax-key';

      expect(getMiniMaxAPIKey(createMockEnv())).toBe('env-minimax-key');
    });

    it('falls back to cloudflare env', () => {
      // MINIMAX_API_KEY is not declared on the stub type, so widen it here.
      const cfEnv = createMockEnv() as Env & { MINIMAX_API_KEY: string };
      cfEnv.MINIMAX_API_KEY = 'cf-minimax-key';

      expect(getMiniMaxAPIKey(cfEnv)).toBe('cf-minimax-key');
    });

    it('returns empty string when not configured', () => {
      expect(getMiniMaxAPIKey(createMockEnv())).toBe('');
    });
  });

  describe('getLLMProvider', () => {
    it('defaults to anthropic', () => {
      expect(getLLMProvider(createMockEnv())).toBe('anthropic');
    });

    it('returns minimax when configured via process.env', () => {
      env.DEFAULT_LLM_PROVIDER = 'minimax';

      expect(getLLMProvider(createMockEnv())).toBe('minimax');
    });

    it('is case-insensitive', () => {
      env.DEFAULT_LLM_PROVIDER = 'MiniMax';

      expect(getLLMProvider(createMockEnv())).toBe('minimax');
    });

    it('falls back to cloudflare env', () => {
      const cfEnv = createMockEnv() as Env & { DEFAULT_LLM_PROVIDER: string };
      cfEnv.DEFAULT_LLM_PROVIDER = 'minimax';

      expect(getLLMProvider(cfEnv)).toBe('minimax');
    });

    it('returns anthropic for unknown providers', () => {
      // Unrecognized names must not break callers — they fall back silently.
      env.DEFAULT_LLM_PROVIDER = 'unknown-provider';

      expect(getLLMProvider(createMockEnv())).toBe('anthropic');
    });
  });

  describe('getProviderAPIKey', () => {
    it('returns anthropic provider and key by default', () => {
      env.ANTHROPIC_API_KEY = 'anthropic-key';

      const result = getProviderAPIKey(createMockEnv());

      expect(result).toEqual({ provider: 'anthropic', apiKey: 'anthropic-key' });
    });

    it('returns minimax provider and key when configured', () => {
      env.DEFAULT_LLM_PROVIDER = 'minimax';
      env.MINIMAX_API_KEY = 'minimax-key';

      const result = getProviderAPIKey(createMockEnv());

      expect(result).toEqual({ provider: 'minimax', apiKey: 'minimax-key' });
    });
  });
});
27 changes: 27 additions & 0 deletions app/lib/.server/llm/api-key.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { env } from 'node:process';
import type { LLMProvider } from './model';

export function getAPIKey(cloudflareEnv: Env) {
/**
Expand All @@ -7,3 +8,29 @@ export function getAPIKey(cloudflareEnv: Env) {
*/
return env.ANTHROPIC_API_KEY || cloudflareEnv.ANTHROPIC_API_KEY;
}

/**
 * Resolves the MiniMax API key.
 *
 * The local process environment takes precedence (handy for local dev);
 * otherwise the Cloudflare binding is consulted. Always returns a string —
 * '' when no key is configured anywhere.
 */
export function getMiniMaxAPIKey(cloudflareEnv: Env) {
  const localKey = env.MINIMAX_API_KEY;

  if (localKey) {
    return localKey;
  }

  return cloudflareEnv.MINIMAX_API_KEY || '';
}

/**
 * Determines which LLM provider is active.
 *
 * Reads DEFAULT_LLM_PROVIDER from process.env first, then from the
 * Cloudflare binding. Matching is case-insensitive; unset or unrecognized
 * values fall back to 'anthropic'.
 */
export function getLLMProvider(cloudflareEnv: Env): LLMProvider {
  const configured = env.DEFAULT_LLM_PROVIDER || cloudflareEnv.DEFAULT_LLM_PROVIDER || 'anthropic';

  return configured.toLowerCase() === 'minimax' ? 'minimax' : 'anthropic';
}

/**
 * Resolves the active provider together with its API key.
 *
 * @returns The provider name and the key looked up for that provider
 *          (the key may be '' when nothing is configured for it).
 */
export function getProviderAPIKey(cloudflareEnv: Env): { provider: LLMProvider; apiKey: string } {
  if (getLLMProvider(cloudflareEnv) === 'minimax') {
    return { provider: 'minimax', apiKey: getMiniMaxAPIKey(cloudflareEnv) };
  }

  return { provider: 'anthropic', apiKey: getAPIKey(cloudflareEnv) };
}
110 changes: 110 additions & 0 deletions app/lib/.server/llm/minimax.e2e.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
import { describe, expect, it } from 'vitest';
import { env } from 'node:process';

const API_KEY = env.MINIMAX_API_KEY;
const BASE_URL = env.MINIMAX_BASE_URL || 'https://api.minimax.io/v1';

// Per-test timeout for live network calls (ms).
const REQUEST_TIMEOUT_MS = 30000;

/**
 * POSTs a single-user-message chat completion to the MiniMax API.
 *
 * Centralizes the URL, auth header, and JSON envelope that all three tests
 * previously duplicated.
 *
 * @param model  - MiniMax model identifier.
 * @param prompt - User message content.
 * @param extra  - Additional body fields (e.g. max_tokens, stream).
 */
function requestChatCompletion(
  model: string,
  prompt: string,
  extra: Record<string, unknown> = {},
): Promise<Response> {
  return fetch(`${BASE_URL}/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${API_KEY}`,
    },
    body: JSON.stringify({
      model,
      messages: [{ role: 'user', content: prompt }],
      temperature: 1.0,
      ...extra,
    }),
  });
}

/** Asserts that a non-streaming completion succeeds and yields content. */
async function expectBasicCompletion(model: string, prompt: string): Promise<void> {
  const response = await requestChatCompletion(model, prompt, { max_tokens: 20 });

  expect(response.ok).toBe(true);

  const data = await response.json();

  expect(data.choices).toBeDefined();
  expect(data.choices[0].message.content).toBeTruthy();
}

// Live tests against the MiniMax API; skipped unless MINIMAX_API_KEY is set.
describe.skipIf(!API_KEY)('MiniMax E2E', () => {
  it(
    'completes basic chat with MiniMax-M2.7',
    () => expectBasicCompletion('MiniMax-M2.7', 'Say "test passed"'),
    REQUEST_TIMEOUT_MS,
  );

  it(
    'handles streaming response',
    async () => {
      const response = await requestChatCompletion('MiniMax-M2.7', 'Count 1 to 3', {
        max_tokens: 50,
        stream: true,
      });

      expect(response.ok).toBe(true);

      // Count SSE `data:` events (excluding the terminal [DONE] sentinel)
      // to confirm the API streamed the answer across multiple chunks.
      const reader = response.body!.getReader();
      const decoder = new TextDecoder();
      let chunks = 0;
      let buffer = '';

      while (true) {
        const { done, value } = await reader.read();

        if (done) {
          break;
        }

        buffer += decoder.decode(value, { stream: true });

        // SSE events are newline-delimited; keep any trailing partial line
        // in the buffer until its terminator arrives.
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';

        for (const line of lines) {
          if (line.startsWith('data:') && !line.includes('[DONE]')) {
            chunks++;
          }
        }
      }

      expect(chunks).toBeGreaterThan(1);
    },
    REQUEST_TIMEOUT_MS,
  );

  it(
    'works with MiniMax-M2.7-highspeed model',
    () => expectBasicCompletion('MiniMax-M2.7-highspeed', 'Say "highspeed test passed"'),
    REQUEST_TIMEOUT_MS,
  );
});
Loading
Loading