Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

✨ feat: upgrade to the new tool calls mode #2414

Merged
merged 24 commits into from
May 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
5d11c4f
✨ feat: finish OpenAI stream tool_calls protocol
arvinxx May 7, 2024
24543c1
💄 style: design the Tool Call UI
arvinxx May 7, 2024
5d1ed62
✨ feat: support zhipu GLM tool calling
arvinxx May 8, 2024
326102e
✨ feat: support moonshot function calling
arvinxx May 8, 2024
6cf086c
✨ feat: support groq tool callings
arvinxx May 8, 2024
dde5582
✨ feat: azure openai stream tool calling
arvinxx May 8, 2024
ad30764
✨ feat: support minimax tool calling
arvinxx May 8, 2024
1292e80
✨ feat: support mistral tool calling
arvinxx May 8, 2024
7e68723
✨ feat: support google tool calling
arvinxx May 9, 2024
ca4f30b
✨ feat: support anthropic tool calling
arvinxx May 9, 2024
bd0b3f7
♻️ refactor: refactor ollama stream
arvinxx May 10, 2024
24b6abb
♻️ refactor: refactor the protocol stream transformer
arvinxx May 10, 2024
83cf6ec
♻️ refactor: refactor the bedrock stream
arvinxx May 10, 2024
fb59d4f
♻️ refactor: remove Stream Text Response
arvinxx May 10, 2024
dc4d0c2
✅ test: clean some tests
arvinxx May 10, 2024
6c07cf9
🗃️ refactor: import and export tool migration
arvinxx May 11, 2024
59402d0
🗃️ refactor: migrate database
arvinxx May 11, 2024
41e9ec4
✅ test: fix test
arvinxx May 11, 2024
e7ae100
♻️ refactor: refactor createCallbacksTransformer to fix tests
arvinxx May 11, 2024
d4965b2
✅ test: add tests for runtime
arvinxx May 11, 2024
b047563
✅ test: add more tests
arvinxx May 11, 2024
5e5add8
✅ test: add test
arvinxx May 11, 2024
0677a51
🎨 chore: improve code
arvinxx May 11, 2024
fed7083
🐛 fix: fix auto meta
arvinxx May 11, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@
},
"dependencies": {
"@ant-design/icons": "^5.3.6",
"@anthropic-ai/sdk": "^0.18.0",
"@anthropic-ai/sdk": "^0.20.9",
"@auth/core": "0.28.0",
"@aws-sdk/client-bedrock-runtime": "^3.565.0",
"@azure/openai": "^1.0.0-beta.12",
Expand All @@ -97,6 +97,7 @@
"@lobehub/icons": "latest",
"@lobehub/tts": "latest",
"@lobehub/ui": "^1.138.17",
"@microsoft/fetch-event-source": "^2.0.1",
"@next/third-parties": "^14.2.3",
"@sentry/nextjs": "^7.112.2",
"@t3-oss/env-nextjs": "^0.10.1",
Expand Down
3 changes: 3 additions & 0 deletions src/config/modelProviders/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ const Anthropic: ModelProviderCard = {
'Ideal balance of intelligence and speed for enterprise workloads. Maximum utility at a lower price, dependable, balanced for scaled deployments',
displayName: 'Claude 3 Sonnet',
enabled: true,
functionCall: true,
id: 'claude-3-sonnet-20240229',
maxOutput: 4096,
tokens: 200_000,
Expand All @@ -18,6 +19,7 @@ const Anthropic: ModelProviderCard = {
'Most powerful model for highly complex tasks. Top-level performance, intelligence, fluency, and understanding',
displayName: 'Claude 3 Opus',
enabled: true,
functionCall: true,
id: 'claude-3-opus-20240229',
maxOutput: 4096,
tokens: 200_000,
Expand All @@ -28,6 +30,7 @@ const Anthropic: ModelProviderCard = {
'Fastest and most compact model for near-instant responsiveness. Quick and accurate targeted performance',
displayName: 'Claude 3 Haiku',
enabled: true,
functionCall: true,
id: 'claude-3-haiku-20240307',
maxOutput: 4096,
tokens: 200_000,
Expand Down
3 changes: 3 additions & 0 deletions src/config/modelProviders/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ const Google: ModelProviderCard = {
description: 'The best model for scaling across a wide range of tasks',
displayName: 'Gemini 1.0 Pro',
enabled: true,
functionCall: true,
id: 'gemini-pro',
maxOutput: 2048,
tokens: 30_720 + 2048,
Expand All @@ -47,6 +48,7 @@ const Google: ModelProviderCard = {
description:
'The best model for scaling across a wide range of tasks. This is a stable model that supports tuning.',
displayName: 'Gemini 1.0 Pro 001 (Tuning)',
functionCall: true,
id: 'gemini-1.0-pro-001',
maxOutput: 2048,
tokens: 30_720 + 2048,
Expand All @@ -71,6 +73,7 @@ const Google: ModelProviderCard = {
description: 'Mid-size multimodal model that supports up to 1 million tokens',
displayName: 'Gemini 1.5 Pro',
enabled: true,
functionCall: true,
id: 'gemini-1.5-pro-latest',
maxOutput: 8192,
tokens: 1_048_576 + 8192,
Expand Down
6 changes: 5 additions & 1 deletion src/config/modelProviders/groq.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,24 +6,28 @@ const Groq: ModelProviderCard = {
{
displayName: 'LLaMA3-3-70B',
enabled: true,
functionCall: true,
id: 'llama3-70b-8192',
tokens: 8192,
},
{
displayName: 'Mixtral-8x7b-Instruct-v0.1',
displayName: 'Mixtral-8x7b',
enabled: true,
functionCall: true,
id: 'mixtral-8x7b-32768',
tokens: 32_768,
},
{
displayName: 'Gemma-7b-it',
enabled: true,
functionCall: true,
id: 'gemma-7b-it',
tokens: 8192,
},
{
displayName: 'LLaMA3-3-8B',
enabled: true,
functionCall: true,
id: 'llama3-8b-8192',
tokens: 8192,
},
Expand Down
17 changes: 10 additions & 7 deletions src/config/modelProviders/minimax.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,24 +3,27 @@ import { ModelProviderCard } from '@/types/llm';
// ref https://www.minimaxi.com/document/guides/chat-model/pro/api
const Minimax: ModelProviderCard = {
chatModels: [
{
description: '复杂场景,例如应用题计算、科学计算等场景',
displayName: 'abab6.5',
enabled: true,
id: 'abab6.5-chat',
tokens: 8192,
},
{
description: '通用场景',
displayName: 'abab6.5s',
enabled: true,
functionCall: true,
id: 'abab6.5s-chat',
tokens: 245_760,
},
{
description: '复杂场景,例如应用题计算、科学计算等场景',
displayName: 'abab6.5',
enabled: true,
functionCall: true,
id: 'abab6.5-chat',
tokens: 8192,
},
{
description: '更复杂的格式化文本生成',
displayName: 'abab6',
enabled: true,
functionCall: true,
id: 'abab6-chat',
tokens: 32_768,
},
Expand Down
1 change: 1 addition & 0 deletions src/config/modelProviders/mistral.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ const Mistral: ModelProviderCard = {
{
displayName: 'Mixtral 8x22B',
enabled: true,
functionCall: true,
id: 'open-mixtral-8x22b',
tokens: 65_536,
},
Expand Down
3 changes: 3 additions & 0 deletions src/config/modelProviders/moonshot.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,21 @@ const Moonshot: ModelProviderCard = {
{
displayName: 'Moonshot V1 8K',
enabled: true,
functionCall: true,
id: 'moonshot-v1-8k',
tokens: 8192,
},
{
displayName: 'Moonshot V1 32K',
enabled: true,
functionCall: true,
id: 'moonshot-v1-32k',
tokens: 32_768,
},
{
displayName: 'Moonshot V1 128K',
enabled: true,
functionCall: true,
id: 'moonshot-v1-128k',
tokens: 128_000,
},
Expand Down
8 changes: 2 additions & 6 deletions src/config/modelProviders/zhipu.ts
Original file line number Diff line number Diff line change
@@ -1,17 +1,13 @@
import { ModelProviderCard } from '@/types/llm';

// TODO: 等待 ZhiPu 修复 API 问题后开启 functionCall
// 暂时不透出 GLM 系列的 function_call 功能
// refs https://github.com/lobehub/lobe-chat/discussions/737#discussioncomment-8315815

// ref https://open.bigmodel.cn/dev/howuse/model
const ZhiPu: ModelProviderCard = {
chatModels: [
{
description: '最新的 GLM-4 、最大支持 128k 上下文、支持 Function Call 、Retreival',
displayName: 'GLM-4',
enabled: true,
// functionCall: true,
functionCall: true,
id: 'glm-4',
tokens: 128_000,
},
Expand All @@ -28,7 +24,7 @@ const ZhiPu: ModelProviderCard = {
description: '最新的glm-3-turbo、最大支持 128k上下文、支持Function Call、Retreival',
displayName: 'GLM-3 Turbo',
enabled: true,
// functionCall: true,
functionCall: true,
id: 'glm-3-turbo',
tokens: 128_000,
},
Expand Down
2 changes: 1 addition & 1 deletion src/config/server/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ export const getProviderConfig = () => {
AWS_ACCESS_KEY_ID: AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY: process.env.AWS_SECRET_ACCESS_KEY || '',

ENABLE_OLLAMA: process.env.ENABLE_OLLAMA as unknown as boolean,
ENABLE_OLLAMA: Boolean(process.env.ENABLE_OLLAMA),
OLLAMA_PROXY_URL: process.env.OLLAMA_PROXY_URL || '',
OLLAMA_MODEL_LIST: process.env.OLLAMA_MODEL_LIST || process.env.OLLAMA_CUSTOM_MODELS,
};
Expand Down
32 changes: 32 additions & 0 deletions src/database/client/core/db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
dbSchemaV5,
dbSchemaV6,
dbSchemaV7,
dbSchemaV9,
} from './schemas';
import { DBModel, LOBE_CHAT_LOCAL_DB_NAME } from './types/db';

Expand Down Expand Up @@ -67,6 +68,10 @@
.stores(dbSchemaV7)
.upgrade((trans) => this.upgradeToV8(trans));

this.version(9)
.stores(dbSchemaV9)
.upgrade((trans) => this.upgradeToV9(trans));

this.files = this.table('files');
this.sessions = this.table('sessions');
this.messages = this.table('messages');
Expand Down Expand Up @@ -153,6 +158,33 @@
}
});
};

// v8 -> v9 migration: convert legacy `function`-role messages to the new
// tool-calls shape. Each old function message is rewritten in place as a
// `tool` result message, and a synthesized `assistant` message carrying the
// tool call is inserted as its parent.
upgradeToV9 = async (trans: Transaction) => {
const messages = trans.table('messages');
await messages.toCollection().modify(async (message: DBModel<DB_Message>) => {
// Only legacy plugin/function messages need rewriting.
if ((message.role as string) === 'function') {
// Shallow snapshot of the original record before it is mutated in place.
const origin = Object.assign({}, message);

// Ids are derived deterministically from the message id, so the same
// record always maps to the same tool-call / assistant-message ids.
const toolCallId = `tool_call_${message.id}`;
const assistantMessageId = `tool_calls_${message.id}`;

// Rewrite the existing record as the tool *result* message, linked to
// the synthesized assistant message via parentId / tool_call_id.
message.role = 'tool';
message.tool_call_id = toolCallId;
message.parentId = assistantMessageId;

// Insert the synthesized assistant message that issued the tool call.
// Timestamps are shifted back by 10 so it sorts before the tool result.
// NOTE(review): `message.plugin!` assumes every legacy function message
// carries a plugin payload — confirm before relying on it.
await messages.add({
...origin,
content: '',
createdAt: message.createdAt - 10,
error: undefined,
id: assistantMessageId,
role: 'assistant',
tools: [{ ...message.plugin!, id: toolCallId }],
updatedAt: message.updatedAt - 10,
} as DBModel<DB_Message>);
}

Check warning on line 185 in src/database/client/core/db.ts

View check run for this annotation

Codecov / codecov/patch

src/database/client/core/db.ts#L165-L185

Added lines #L165 - L185 were not covered by tests
});
};
}

export const browserDB = new BrowserDB();
Expand Down
9 changes: 9 additions & 0 deletions src/database/client/core/schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,3 +76,12 @@ export const dbSchemaV7 = {
plugins:
'&identifier, id, type, manifest.type, manifest.meta.title, manifest.meta.description, manifest.meta.author, createdAt, updatedAt',
};
// ************************************** //
// ******* Version 9 - 2024-03-14 ******* //
// ************************************** //
// - Reworked the `messages` table for the tool-calls protocol: indexes the
//   new `tool_call_id` field alongside the existing message indexes.
//   NOTE(review): the date above looks stale relative to this change — confirm.
export const dbSchemaV9 = {
  ...dbSchemaV7,
  // Dexie schema string: `&id` marks the primary key; the remaining entries
  // are indexed fields (compound index on [sessionId+topicId]).
  messages:
    '&id, role, content, fromModel, favorite, tool_call_id, plugin.identifier, plugin.apiName, translate.content, createdAt, updatedAt, sessionId, topicId, quotaId, parentId, [sessionId+topicId], traceId',
};
4 changes: 2 additions & 2 deletions src/database/client/models/__tests__/message.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -264,14 +264,14 @@ describe('MessageModel', () => {
it('should update a role and plugins', async () => {
const createdMessage = await MessageModel.create(messageData);
const updateData = {
role: 'function' as const,
role: 'tool' as const,
plugin: { apiName: 'a', identifier: 'b', arguments: 'abc' },
};

await MessageModel.update(createdMessage.id, updateData);
const updatedMessage = await MessageModel.findById(createdMessage.id);

expect(updatedMessage).toHaveProperty('role', 'function');
expect(updatedMessage).toHaveProperty('role', 'tool');
});
});

Expand Down
9 changes: 8 additions & 1 deletion src/database/client/schemas/message.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,20 @@ const PluginSchema = z.object({
type: z.enum(['default', 'markdown', 'standalone', 'builtin']).default('default'),
});

// A single tool call: the plugin invocation payload plus the unique id of the
// call. The matching `tool` result message points back to it via `tool_call_id`.
const ToolCallSchema = PluginSchema.extend({
  id: z.string(),
});

export const DB_MessageSchema = z.object({
role: z.enum(['user', 'system', 'assistant', 'function']),
role: z.enum(['user', 'system', 'assistant', 'tool']),
content: z.string(),
files: z.array(z.string()).optional(),
favorite: z.number().int().min(0).max(1).optional(),
error: z.any().optional(),

tools: z.array(ToolCallSchema).optional(),
tool_call_id: z.string().optional(),

plugin: PluginSchema.optional(),
pluginState: z.any().optional(),
fromModel: z.string().optional(),
Expand Down
21 changes: 15 additions & 6 deletions src/features/AgentSetting/store/action.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import { TraceNameMap, TracePayload, TraceTopicType } from '@/const/trace';
import { chatService } from '@/services/chat';
import { LobeAgentConfig } from '@/types/agent';
import { MetaData } from '@/types/meta';
import { MessageTextChunk } from '@/utils/fetch';
import { setNamespace } from '@/utils/storeDebug';

import { SessionLoadingState } from '../store/initialState';
Expand Down Expand Up @@ -246,17 +247,25 @@ export const store: StateCreator<Store, [['zustand/devtools', never]]> = (set, g

streamUpdateMetaArray: (key: keyof MetaData) => {
let value = '';
return (text: string) => {
value += text;
get().dispatchMeta({ type: 'update', value: { [key]: value.split(',') } });
return (chunk: MessageTextChunk) => {
switch (chunk.type) {
case 'text': {
value += chunk.text;
get().dispatchMeta({ type: 'update', value: { [key]: value.split(',') } });
}
}
};
},

streamUpdateMetaString: (key: keyof MetaData) => {
let value = '';
return (text: string) => {
value += text;
get().dispatchMeta({ type: 'update', value: { [key]: value } });
return (chunk: MessageTextChunk) => {
switch (chunk.type) {
case 'text': {
value += chunk.text;
get().dispatchMeta({ type: 'update', value: { [key]: value } });
}
}
};
},

Expand Down
16 changes: 16 additions & 0 deletions src/features/Conversation/Actions/Tool.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import { memo } from 'react';

import { RenderAction } from '../types';

/**
 * Actions bar for messages with the `tool` role.
 *
 * Intentionally renders nothing for now; a regenerate-style action group
 * (an ActionIconGroup with a single `regenerate` item) is planned here.
 */
export const ToolActionsBar: RenderAction = memo(() => undefined);
4 changes: 2 additions & 2 deletions src/features/Conversation/Actions/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,13 @@ import { LLMRoleType } from '@/types/llm';
import { OnActionsClick, RenderAction } from '../types';
import { AssistantActionsBar } from './Assistant';
import { DefaultActionsBar } from './Fallback';
import { FunctionActionsBar } from './Function';
import { ToolActionsBar } from './Tool';
import { UserActionsBar } from './User';

/**
 * Maps each chat message role to the actions-bar component rendered for it.
 *
 * The legacy `function` role was replaced by `tool` in the tool-calls
 * migration (see the role enum in the message schema), so the stale
 * `function: FunctionActionsBar` entry is removed and `tool` maps to the
 * new ToolActionsBar.
 */
export const renderActions: Record<LLMRoleType, RenderAction> = {
  assistant: AssistantActionsBar,
  system: DefaultActionsBar,
  tool: ToolActionsBar,
  user: UserActionsBar,
};

Expand Down