Skip to content

Commit

Permalink
✅ test: add more tests
Browse files Browse the repository at this point in the history
  • Loading branch information
arvinxx committed May 11, 2024
1 parent d4965b2 commit c086945
Show file tree
Hide file tree
Showing 10 changed files with 9,975 additions and 1 deletion.
886 changes: 886 additions & 0 deletions src/libs/agent-runtime/togetherai/__snapshots__/index.test.ts.snap

Large diffs are not rendered by default.

8,111 changes: 8,111 additions & 0 deletions src/libs/agent-runtime/togetherai/fixtures/models.json

Large diffs are not rendered by default.

16 changes: 15 additions & 1 deletion src/libs/agent-runtime/togetherai/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
import OpenAI from 'openai';
import { Mock, afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { ChatStreamCallbacks, LobeOpenAICompatibleRuntime } from '@/libs/agent-runtime';
import { LobeOpenAICompatibleRuntime } from '@/libs/agent-runtime';

import * as debugStreamModule from '../utils/debugStream';
import models from './fixtures/models.json';
import { LobeTogetherAI } from './index';

const provider = 'togetherai';
Expand Down Expand Up @@ -295,4 +296,17 @@ describe('LobeTogetherAI', () => {
});
});
});

describe('models', () => {
  it('should get models', async () => {
    // Stub the global fetch so the runtime reads the local fixture
    // instead of hitting the real TogetherAI models endpoint.
    const fetchSpy = vi.spyOn(globalThis, 'fetch');
    fetchSpy.mockResolvedValueOnce({
      ok: true,
      json: async () => models,
    } as Response);

    const modelList = await instance.models();

    // Snapshot the model list parsed from the fixture payload.
    expect(modelList).toMatchSnapshot();
  });
});
});
119 changes: 119 additions & 0 deletions src/libs/agent-runtime/utils/anthropicHelpers.test.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import { OpenAI } from 'openai';
import { describe, expect, it } from 'vitest';

import { OpenAIChatMessage, UserMessageContentPart } from '../types/chat';
import {
buildAnthropicBlock,
buildAnthropicMessage,
buildAnthropicMessages,
buildAnthropicTools,
} from './anthropicHelpers';
import { parseDataUri } from './uriParser';

Expand Down Expand Up @@ -51,6 +53,86 @@ describe('anthropicHelpers', () => {
const result = buildAnthropicMessage(message);
expect(result).toEqual({ content: [{ type: 'text', text: 'Hello!' }], role: 'user' });
});

it('should correctly convert user message with string content', () => {
  // Plain-string user content must pass through unchanged.
  const input: OpenAIChatMessage = { role: 'user', content: 'Hello!' };

  const converted = buildAnthropicMessage(input);

  expect(converted).toEqual({ content: 'Hello!', role: 'user' });
});

it('should correctly convert user message with content parts', () => {
  const message: OpenAIChatMessage = {
    content: [
      { type: 'text', text: 'Check out this image:' },
      { type: 'image_url', image_url: { url: 'data:image/png;base64,abc123' } },
    ],
    role: 'user',
  };
  const result = buildAnthropicMessage(message);
  expect(result.role).toBe('user');
  expect(result.content).toHaveLength(2);
  // Use partial matching instead of an `as any` cast: this keeps the test
  // type-safe and additionally pins the type of the first (text) part,
  // which the original assertion ignored.
  expect(result.content).toMatchObject([{ type: 'text' }, { type: 'image' }]);
});

it('should correctly convert tool message', () => {
  // A `tool` role message becomes a user-role tool_result block keyed
  // by the originating tool_call_id.
  const toolMessage: OpenAIChatMessage = {
    role: 'tool',
    tool_call_id: 'tool123',
    content: 'Tool result content',
  };

  const converted = buildAnthropicMessage(toolMessage);

  expect(converted.role).toBe('user');
  expect(converted.content).toEqual([
    {
      content: 'Tool result content',
      tool_use_id: 'tool123',
      type: 'tool_result',
    },
  ]);
});

it('should correctly convert assistant message with tool calls', () => {
  // An assistant message carrying tool_calls maps to a text block
  // followed by a tool_use block with the parsed JSON arguments.
  const toolCall = {
    id: 'call1',
    type: 'function' as const,
    function: { name: 'search', arguments: '{"query":"anthropic"}' },
  };
  const assistantMessage: OpenAIChatMessage = {
    role: 'assistant',
    content: 'Here is the result:',
    tool_calls: [toolCall],
  };

  const converted = buildAnthropicMessage(assistantMessage);

  expect(converted.role).toBe('assistant');
  expect(converted.content).toEqual([
    { text: 'Here is the result:', type: 'text' },
    {
      id: 'call1',
      input: { query: 'anthropic' },
      name: 'search',
      type: 'tool_use',
    },
  ]);
});

it('should correctly convert function message', () => {
  // Legacy `function` role messages are remapped to the assistant role
  // with their content left untouched.
  const code = 'def hello(name):\n  return f"Hello {name}"';
  const functionMessage: OpenAIChatMessage = { role: 'function', content: code };

  expect(buildAnthropicMessage(functionMessage)).toEqual({
    content: code,
    role: 'assistant',
  });
});
});

describe('buildAnthropicMessages', () => {
Expand Down Expand Up @@ -111,4 +193,41 @@ describe('anthropicHelpers', () => {
]);
});
});

describe('buildAnthropicTools', () => {
  it('should correctly convert OpenAI tools to Anthropic format', () => {
    // OpenAI nests the JSON schema under `function.parameters`;
    // Anthropic expects it flattened into `input_schema`.
    const schema = {
      type: 'object',
      properties: { query: { type: 'string' } },
      required: ['query'],
    };

    const openAITools: OpenAI.ChatCompletionTool[] = [
      {
        type: 'function',
        function: {
          name: 'search',
          description: 'Searches the web',
          parameters: schema,
        },
      },
    ];

    expect(buildAnthropicTools(openAITools)).toEqual([
      {
        name: 'search',
        description: 'Searches the web',
        input_schema: schema,
      },
    ]);
  });
});
});
70 changes: 70 additions & 0 deletions src/libs/agent-runtime/utils/debugStream.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';

import { debugStream } from './debugStream';

describe('debugStream', () => {
  let logSpy: ReturnType<typeof vi.spyOn>;
  let errorSpy: ReturnType<typeof vi.spyOn>;

  // Build a ReadableStream emitting the given chunks in order. `close`
  // controls whether the stream terminates cleanly — the error-path test
  // deliberately leaves it open, exactly as the scenario requires.
  const streamOf = (chunks: unknown[], close = true) =>
    new ReadableStream({
      start(controller) {
        for (const chunk of chunks) controller.enqueue(chunk);
        if (close) controller.close();
      },
    });

  beforeEach(() => {
    // Silence console output while still recording calls.
    logSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
    errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
  });

  afterEach(() => {
    logSpy.mockRestore();
    errorSpy.mockRestore();
  });

  it('should log stream start and end messages', async () => {
    await debugStream(streamOf(['test chunk']));

    expect(logSpy).toHaveBeenCalledWith(expect.stringMatching(/^\[stream start\]/));
  });

  it('should handle and log stream errors', async () => {
    await debugStream(streamOf(['test chunk'], false));

    expect(errorSpy).toHaveBeenCalledWith('[debugStream error]', expect.any(Error));
    expect(errorSpy).toHaveBeenCalledWith('[error chunk value:]', 'test chunk');
  });

  it('should decode ArrayBuffer chunk values', async () => {
    await debugStream(streamOf([new TextEncoder().encode('test chunk')]));

    expect(logSpy).toHaveBeenCalledWith('test chunk');
  });

  it('should stringify non-string chunk values', async () => {
    await debugStream(streamOf([{ test: 'chunk' }]));

    expect(logSpy).toHaveBeenCalledWith('{"test":"chunk"}');
  });
});
141 changes: 141 additions & 0 deletions src/libs/agent-runtime/utils/streams/anthropic.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
import type { Stream } from '@anthropic-ai/sdk/streaming';
import { describe, expect, it, vi } from 'vitest';

import { AnthropicStream } from './anthropic';

describe('AnthropicStream', () => {
  it('should transform Anthropic stream to protocol stream', async () => {
    // Raw SDK events, replayed in order via an async generator — this
    // yields the exact same sequence as a hand-rolled async iterator.
    const sdkEvents = [
      { type: 'message_start', message: { id: 'message_1', metadata: {} } },
      { type: 'content_block_delta', delta: { type: 'text_delta', text: 'Hello' } },
      { type: 'content_block_delta', delta: { type: 'text_delta', text: ' world!' } },
      { type: 'message_delta', delta: { stop_reason: 'stop' } },
    ];

    // @ts-ignore
    const mockAnthropicStream: Stream = {
      async *[Symbol.asyncIterator]() {
        yield* sdkEvents;
      },
    };

    const callbacks = {
      onStart: vi.fn(),
      onText: vi.fn(),
      onToken: vi.fn(),
      onCompletion: vi.fn(),
    };

    const protocolStream = AnthropicStream(mockAnthropicStream, callbacks);

    const decoder = new TextDecoder();
    const received: string[] = [];

    // @ts-ignore
    for await (const chunk of protocolStream) {
      received.push(decoder.decode(chunk, { stream: true }));
    }

    // Each SDK event becomes an id/event/data SSE triplet.
    expect(received).toEqual([
      'id: message_1\n',
      'event: data\n',
      `data: {"id":"message_1","metadata":{}}\n\n`,
      'id: message_1\n',
      'event: text\n',
      `data: "Hello"\n\n`,
      'id: message_1\n',
      'event: text\n',
      `data: " world!"\n\n`,
      'id: message_1\n',
      'event: stop\n',
      `data: "stop"\n\n`,
    ]);

    expect(callbacks.onStart).toHaveBeenCalledTimes(1);
    expect(callbacks.onText).toHaveBeenNthCalledWith(1, '"Hello"');
    expect(callbacks.onText).toHaveBeenNthCalledWith(2, '" world!"');
    expect(callbacks.onToken).toHaveBeenCalledTimes(2);
    expect(callbacks.onCompletion).toHaveBeenCalledTimes(1);
  });

  it('should handle ReadableStream input', async () => {
    const sdkEvents = [
      { type: 'message_start', message: { id: 'message_1', metadata: {} } },
      { type: 'content_block_delta', delta: { type: 'text_delta', text: 'Hello' } },
      { type: 'message_stop' },
    ];

    const mockReadableStream = new ReadableStream({
      start(controller) {
        for (const event of sdkEvents) controller.enqueue(event);
        controller.close();
      },
    });

    const protocolStream = AnthropicStream(mockReadableStream);

    const decoder = new TextDecoder();
    const received: string[] = [];

    // @ts-ignore
    for await (const chunk of protocolStream) {
      received.push(decoder.decode(chunk, { stream: true }));
    }

    expect(received).toEqual([
      'id: message_1\n',
      'event: data\n',
      `data: {"id":"message_1","metadata":{}}\n\n`,
      'id: message_1\n',
      'event: text\n',
      `data: "Hello"\n\n`,
      'id: message_1\n',
      'event: stop\n',
      `data: "message_stop"\n\n`,
    ]);
  });
});

0 comments on commit c086945

Please sign in to comment.