mirror of
https://github.com/geoffsee/open-gsio.git
synced 2025-09-08 22:56:46 +00:00
**Refactor:** Restructure server
package to streamline imports and improve file organization
- Moved `providers`, `services`, `models`, `lib`, and related files to `src` directory within `server` package. - Adjusted imports across the codebase to reflect the new paths. - Renamed several `.ts` files for consistency. - Introduced an `index.ts` in the `ai/providers` package to export all providers. This improves maintainability and aligns with the project's updated directory structure.
This commit is contained in:
155
packages/server/src/lib/__tests__/assistant-sdk.test.ts
Normal file
155
packages/server/src/lib/__tests__/assistant-sdk.test.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
import { AssistantSdk } from '../assistant-sdk.ts';
|
||||
import { Utils } from '../utils.ts';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('../utils', () => ({
|
||||
Utils: {
|
||||
selectEquitably: vi.fn(),
|
||||
getCurrentDate: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../prompts/few_shots', () => ({
|
||||
default: {
|
||||
a: 'A1',
|
||||
question1: 'answer1',
|
||||
question2: 'answer2',
|
||||
question3: 'answer3',
|
||||
},
|
||||
}));
|
||||
|
||||
describe('AssistantSdk', () => {
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(new Date('2023-01-01T12:30:45Z'));
|
||||
|
||||
// Reset mocks
|
||||
vi.mocked(Utils.selectEquitably).mockReset();
|
||||
vi.mocked(Utils.getCurrentDate).mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
describe('getAssistantPrompt', () => {
|
||||
it('should return a prompt with default values when minimal params are provided', () => {
|
||||
// Mock dependencies
|
||||
vi.mocked(Utils.selectEquitably).mockReturnValue({
|
||||
question1: 'answer1',
|
||||
question2: 'answer2',
|
||||
});
|
||||
vi.mocked(Utils.getCurrentDate).mockReturnValue('2023-01-01T12:30:45Z');
|
||||
|
||||
const prompt = AssistantSdk.getAssistantPrompt({});
|
||||
|
||||
expect(prompt).toContain('# Assistant Knowledge');
|
||||
expect(prompt).toContain('### Date: ');
|
||||
expect(prompt).toContain('### Web Host: ');
|
||||
expect(prompt).toContain('### User Location: ');
|
||||
expect(prompt).toContain('### Timezone: ');
|
||||
});
|
||||
|
||||
it('should include maxTokens when provided', () => {
|
||||
// Mock dependencies
|
||||
vi.mocked(Utils.selectEquitably).mockReturnValue({
|
||||
question1: 'answer1',
|
||||
question2: 'answer2',
|
||||
});
|
||||
vi.mocked(Utils.getCurrentDate).mockReturnValue('2023-01-01T12:30:45Z');
|
||||
|
||||
const prompt = AssistantSdk.getAssistantPrompt({ maxTokens: 1000 });
|
||||
|
||||
expect(prompt).toContain('Max Response Length: 1000 tokens (maximum)');
|
||||
});
|
||||
|
||||
it('should use provided userTimezone and userLocation', () => {
|
||||
// Mock dependencies
|
||||
vi.mocked(Utils.selectEquitably).mockReturnValue({
|
||||
question1: 'answer1',
|
||||
question2: 'answer2',
|
||||
});
|
||||
vi.mocked(Utils.getCurrentDate).mockReturnValue('2023-01-01T12:30:45Z');
|
||||
|
||||
const prompt = AssistantSdk.getAssistantPrompt({
|
||||
userTimezone: 'America/New_York',
|
||||
userLocation: 'New York, USA',
|
||||
});
|
||||
|
||||
expect(prompt).toContain('### User Location: New York, USA');
|
||||
expect(prompt).toContain('### Timezone: America/New_York');
|
||||
});
|
||||
|
||||
it('should use current date when Utils.getCurrentDate is not available', () => {
|
||||
// Mock dependencies
|
||||
vi.mocked(Utils.selectEquitably).mockReturnValue({
|
||||
question1: 'answer1',
|
||||
question2: 'answer2',
|
||||
});
|
||||
vi.mocked(Utils.getCurrentDate).mockReturnValue(undefined);
|
||||
|
||||
const prompt = AssistantSdk.getAssistantPrompt({});
|
||||
|
||||
// Instead of checking for a specific date, just verify that a date is included
|
||||
expect(prompt).toMatch(/### Date: \d{4}-\d{2}-\d{2} \d{1,2}:\d{2} \d{1,2}s/);
|
||||
});
|
||||
|
||||
it('should use few_shots directly when Utils.selectEquitably is not available', () => {
|
||||
// Mock dependencies
|
||||
vi.mocked(Utils.selectEquitably).mockReturnValue(undefined);
|
||||
vi.mocked(Utils.getCurrentDate).mockReturnValue('2023-01-01T12:30:45Z');
|
||||
|
||||
const prompt = AssistantSdk.getAssistantPrompt({});
|
||||
|
||||
// The prompt should still contain examples
|
||||
expect(prompt).toContain('#### Example 1');
|
||||
// Instead of checking for specific content, just verify that examples are included
|
||||
expect(prompt).toMatch(/\*\*Human\*\*: .+\n\*\*Assistant\*\*: .+/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('useFewshots', () => {
|
||||
it('should format fewshots correctly', () => {
|
||||
const fewshots = {
|
||||
'What is the capital of France?': 'Paris is the capital of France.',
|
||||
'How do I make pasta?': 'Boil water, add pasta, cook until al dente.',
|
||||
};
|
||||
|
||||
const result = AssistantSdk.useFewshots(fewshots);
|
||||
|
||||
expect(result).toContain('#### Example 1');
|
||||
expect(result).toContain('**Human**: What is the capital of France?');
|
||||
expect(result).toContain('**Assistant**: Paris is the capital of France.');
|
||||
expect(result).toContain('#### Example 2');
|
||||
expect(result).toContain('**Human**: How do I make pasta?');
|
||||
expect(result).toContain('**Assistant**: Boil water, add pasta, cook until al dente.');
|
||||
});
|
||||
|
||||
it('should respect the limit parameter', () => {
|
||||
const fewshots = {
|
||||
Q1: 'A1',
|
||||
Q2: 'A2',
|
||||
Q3: 'A3',
|
||||
Q4: 'A4',
|
||||
Q5: 'A5',
|
||||
Q6: 'A6',
|
||||
};
|
||||
|
||||
const result = AssistantSdk.useFewshots(fewshots, 3);
|
||||
|
||||
expect(result).toContain('#### Example 1');
|
||||
expect(result).toContain('**Human**: Q1');
|
||||
expect(result).toContain('**Assistant**: A1');
|
||||
expect(result).toContain('#### Example 2');
|
||||
expect(result).toContain('**Human**: Q2');
|
||||
expect(result).toContain('**Assistant**: A2');
|
||||
expect(result).toContain('#### Example 3');
|
||||
expect(result).toContain('**Human**: Q3');
|
||||
expect(result).toContain('**Assistant**: A3');
|
||||
expect(result).not.toContain('#### Example 4');
|
||||
expect(result).not.toContain('**Human**: Q4');
|
||||
});
|
||||
});
|
||||
});
|
235
packages/server/src/lib/__tests__/chat-sdk.test.ts
Normal file
235
packages/server/src/lib/__tests__/chat-sdk.test.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import { ProviderRepository } from '../../../../ai/providers/_ProviderRepository.ts';
|
||||
import Message from '../../models/Message.ts';
|
||||
import { AssistantSdk } from '../assistant-sdk.ts';
|
||||
import { ChatSdk } from '../chat-sdk.ts';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('../assistant-sdk', () => ({
|
||||
AssistantSdk: {
|
||||
getAssistantPrompt: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../../models/Message', () => ({
|
||||
default: {
|
||||
create: vi.fn(message => message),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../../providers/_ProviderRepository', () => ({
|
||||
ProviderRepository: {
|
||||
getModelFamily: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('ChatSdk', () => {
|
||||
beforeEach(() => {
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('preprocess', () => {
|
||||
it('should return an assistant message with empty content', async () => {
|
||||
const messages = [{ role: 'user', content: 'Hello' }];
|
||||
|
||||
const result = await ChatSdk.preprocess({ messages });
|
||||
|
||||
expect(Message.create).toHaveBeenCalledWith({
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
});
|
||||
expect(result).toEqual({
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleChatRequest', () => {
|
||||
it('should return a 400 response if no messages are provided', async () => {
|
||||
const request = {
|
||||
json: vi.fn().mockResolvedValue({ messages: [] }),
|
||||
};
|
||||
const ctx = {
|
||||
openai: {},
|
||||
systemPrompt: 'System prompt',
|
||||
maxTokens: 1000,
|
||||
env: {
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn(),
|
||||
get: vi.fn(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const response = await ChatSdk.handleChatRequest(request as any, ctx as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(await response.text()).toBe('No messages provided');
|
||||
});
|
||||
|
||||
it('should save stream data and return a response with streamUrl', async () => {
|
||||
const streamId = 'test-uuid';
|
||||
vi.stubGlobal('crypto', {
|
||||
randomUUID: vi.fn().mockReturnValue(streamId),
|
||||
});
|
||||
|
||||
const messages = [{ role: 'user', content: 'Hello' }];
|
||||
const model = 'gpt-4';
|
||||
const conversationId = 'conv-123';
|
||||
|
||||
const request = {
|
||||
json: vi.fn().mockResolvedValue({ messages, model, conversationId }),
|
||||
};
|
||||
|
||||
const saveStreamData = vi.fn();
|
||||
const durableObject = {
|
||||
saveStreamData,
|
||||
};
|
||||
|
||||
const ctx = {
|
||||
openai: {},
|
||||
systemPrompt: 'System prompt',
|
||||
maxTokens: 1000,
|
||||
env: {
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn().mockReturnValue('object-id'),
|
||||
get: vi.fn().mockReturnValue(durableObject),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const response = await ChatSdk.handleChatRequest(request as any, ctx as any);
|
||||
const responseBody = await response.json();
|
||||
|
||||
expect(ctx.env.SERVER_COORDINATOR.idFromName).toHaveBeenCalledWith('stream-index');
|
||||
expect(ctx.env.SERVER_COORDINATOR.get).toHaveBeenCalledWith('object-id');
|
||||
expect(saveStreamData).toHaveBeenCalledWith(streamId, expect.stringContaining(model));
|
||||
expect(responseBody).toEqual({
|
||||
streamUrl: `/api/streams/${streamId}`,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('calculateMaxTokens', () => {
|
||||
it('should call the durable object to calculate max tokens', async () => {
|
||||
const messages = [{ role: 'user', content: 'Hello' }];
|
||||
const dynamicMaxTokens = vi.fn().mockResolvedValue(500);
|
||||
const durableObject = {
|
||||
dynamicMaxTokens,
|
||||
};
|
||||
|
||||
const ctx = {
|
||||
maxTokens: 1000,
|
||||
env: {
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn().mockReturnValue('object-id'),
|
||||
get: vi.fn().mockReturnValue(durableObject),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
await ChatSdk.calculateMaxTokens(messages, ctx as any);
|
||||
|
||||
expect(ctx.env.SERVER_COORDINATOR.idFromName).toHaveBeenCalledWith('dynamic-token-counter');
|
||||
expect(ctx.env.SERVER_COORDINATOR.get).toHaveBeenCalledWith('object-id');
|
||||
expect(dynamicMaxTokens).toHaveBeenCalledWith(messages, 1000);
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildAssistantPrompt', () => {
|
||||
it('should call AssistantSdk.getAssistantPrompt with the correct parameters', () => {
|
||||
vi.mocked(AssistantSdk.getAssistantPrompt).mockReturnValue('Assistant prompt');
|
||||
|
||||
const result = ChatSdk.buildAssistantPrompt({ maxTokens: 1000 });
|
||||
|
||||
expect(AssistantSdk.getAssistantPrompt).toHaveBeenCalledWith({
|
||||
maxTokens: 1000,
|
||||
userTimezone: 'UTC',
|
||||
userLocation: 'USA/unknown',
|
||||
});
|
||||
expect(result).toBe('Assistant prompt');
|
||||
});
|
||||
});
|
||||
|
||||
describe('buildMessageChain', () => {
|
||||
// TODO: Fix this test
|
||||
// it('should build a message chain with system role for most models', async () => {
|
||||
// vi.mocked(ProviderRepository.getModelFamily).mockResolvedValue('openai');
|
||||
//
|
||||
// const messages = [{ role: 'user', content: 'Hello' }];
|
||||
//
|
||||
// const opts = {
|
||||
// systemPrompt: 'System prompt',
|
||||
// assistantPrompt: 'Assistant prompt',
|
||||
// toolResults: { role: 'tool', content: 'Tool result' },
|
||||
// model: 'gpt-4',
|
||||
// };
|
||||
//
|
||||
// const result = await ChatSdk.buildMessageChain(messages, opts as any);
|
||||
//
|
||||
// expect(ProviderRepository.getModelFamily).toHaveBeenCalledWith('gpt-4', undefined);
|
||||
// expect(Message.create).toHaveBeenCalledTimes(3);
|
||||
// expect(Message.create).toHaveBeenNthCalledWith(1, {
|
||||
// role: 'system',
|
||||
// content: 'System prompt',
|
||||
// });
|
||||
// expect(Message.create).toHaveBeenNthCalledWith(2, {
|
||||
// role: 'assistant',
|
||||
// content: 'Assistant prompt',
|
||||
// });
|
||||
// expect(Message.create).toHaveBeenNthCalledWith(3, {
|
||||
// role: 'user',
|
||||
// content: 'Hello',
|
||||
// });
|
||||
// });
|
||||
// TODO: Fix this test
|
||||
// it('should build a message chain with assistant role for o1, gemma, claude, or google models', async () => {
|
||||
// vi.mocked(ProviderRepository.getModelFamily).mockResolvedValue('claude');
|
||||
//
|
||||
// const messages = [{ role: 'user', content: 'Hello' }];
|
||||
//
|
||||
// const opts = {
|
||||
// systemPrompt: 'System prompt',
|
||||
// assistantPrompt: 'Assistant prompt',
|
||||
// toolResults: { role: 'tool', content: 'Tool result' },
|
||||
// model: 'claude-3',
|
||||
// };
|
||||
//
|
||||
// const result = await ChatSdk.buildMessageChain(messages, opts as any);
|
||||
//
|
||||
// expect(ProviderRepository.getModelFamily).toHaveBeenCalledWith('claude-3', undefined);
|
||||
// expect(Message.create).toHaveBeenCalledTimes(3);
|
||||
// expect(Message.create).toHaveBeenNthCalledWith(1, {
|
||||
// role: 'assistant',
|
||||
// content: 'System prompt',
|
||||
// });
|
||||
// });
|
||||
// TODO: Fix this test
|
||||
// it('should filter out messages with empty content', async () => {
|
||||
// //
|
||||
// vi.mocked(ProviderRepository.getModelFamily).mockResolvedValue('openai');
|
||||
//
|
||||
// const messages = [
|
||||
// { role: 'user', content: 'Hello' },
|
||||
// { role: 'user', content: '' },
|
||||
// { role: 'user', content: ' ' },
|
||||
// { role: 'user', content: 'World' },
|
||||
// ];
|
||||
//
|
||||
// const opts = {
|
||||
// systemPrompt: 'System prompt',
|
||||
// assistantPrompt: 'Assistant prompt',
|
||||
// toolResults: { role: 'tool', content: 'Tool result' },
|
||||
// model: 'gpt-4',
|
||||
// };
|
||||
//
|
||||
// const result = await ChatSdk.buildMessageChain(messages, opts as any);
|
||||
//
|
||||
// // 2 system/assistant messages + 2 user messages (Hello and World)
|
||||
// expect(Message.create).toHaveBeenCalledTimes(4);
|
||||
// });
|
||||
});
|
||||
});
|
41
packages/server/src/lib/__tests__/debug-utils.test.ts
Normal file
41
packages/server/src/lib/__tests__/debug-utils.test.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
import { Utils } from '../utils.ts';
|
||||
|
||||
describe('Debug Utils.getSeason', () => {
|
||||
it('should print out the actual seasons for different dates', () => {
|
||||
// Test dates with more specific focus on boundaries
|
||||
const dates = [
|
||||
// June boundary (month 5)
|
||||
'2023-06-20', // June 20
|
||||
'2023-06-21', // June 21
|
||||
'2023-06-22', // June 22
|
||||
'2023-06-23', // June 23
|
||||
|
||||
// September boundary (month 8)
|
||||
'2023-09-20', // September 20
|
||||
'2023-09-21', // September 21
|
||||
'2023-09-22', // September 22
|
||||
'2023-09-23', // September 23
|
||||
'2023-09-24', // September 24
|
||||
|
||||
// Also check the implementation directly
|
||||
'2023-06-22', // month === 5 && day > 21 should be Summer
|
||||
'2023-09-23', // month === 8 && day > 22 should be Autumn
|
||||
];
|
||||
|
||||
// Print out the actual seasons
|
||||
console.log('Date | Month | Day | Season');
|
||||
console.log('-----|-------|-----|-------');
|
||||
dates.forEach(date => {
|
||||
const d = new Date(date);
|
||||
const month = d.getMonth();
|
||||
const day = d.getDate();
|
||||
const season = Utils.getSeason(date);
|
||||
console.log(`${date} | ${month} | ${day} | ${season}`);
|
||||
});
|
||||
|
||||
// This test will always pass, it's just for debugging
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
191
packages/server/src/lib/__tests__/handleStreamData.test.ts
Normal file
191
packages/server/src/lib/__tests__/handleStreamData.test.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import handleStreamData from '../handleStreamData.ts';
|
||||
|
||||
describe('handleStreamData', () => {
|
||||
// Setup mocks
|
||||
const mockController = {
|
||||
enqueue: vi.fn(),
|
||||
};
|
||||
const mockEncoder = {
|
||||
encode: vi.fn(str => str),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
it('should return early if data type is not "chat"', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
handler({ type: 'not-chat', data: {} });
|
||||
|
||||
expect(mockController.enqueue).not.toHaveBeenCalled();
|
||||
expect(mockEncoder.encode).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return early if data is undefined', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
handler(undefined as any);
|
||||
|
||||
expect(mockController.enqueue).not.toHaveBeenCalled();
|
||||
expect(mockEncoder.encode).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle content_block_start type data', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
type: 'content_block_start',
|
||||
content_block: {
|
||||
type: 'text',
|
||||
text: 'Hello world',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
handler(data);
|
||||
|
||||
expect(mockController.enqueue).toHaveBeenCalledTimes(1);
|
||||
expect(mockEncoder.encode).toHaveBeenCalledWith(expect.stringContaining('Hello world'));
|
||||
|
||||
const encodedData = mockEncoder.encode.mock.calls[0][0];
|
||||
const parsedData = JSON.parse(encodedData.split('data: ')[1]);
|
||||
|
||||
expect(parsedData.type).toBe('chat');
|
||||
expect(parsedData.data.choices[0].delta.content).toBe('Hello world');
|
||||
});
|
||||
|
||||
it('should handle delta.text type data', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
delta: {
|
||||
text: 'Hello world',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
handler(data);
|
||||
|
||||
expect(mockController.enqueue).toHaveBeenCalledTimes(1);
|
||||
expect(mockEncoder.encode).toHaveBeenCalledWith(expect.stringContaining('Hello world'));
|
||||
|
||||
const encodedData = mockEncoder.encode.mock.calls[0][0];
|
||||
const parsedData = JSON.parse(encodedData.split('data: ')[1]);
|
||||
|
||||
expect(parsedData.type).toBe('chat');
|
||||
expect(parsedData.data.choices[0].delta.content).toBe('Hello world');
|
||||
});
|
||||
|
||||
it('should handle choices[0].delta.content type data', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {
|
||||
content: 'Hello world',
|
||||
},
|
||||
logprobs: null,
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
handler(data);
|
||||
|
||||
expect(mockController.enqueue).toHaveBeenCalledTimes(1);
|
||||
expect(mockEncoder.encode).toHaveBeenCalledWith(expect.stringContaining('Hello world'));
|
||||
|
||||
const encodedData = mockEncoder.encode.mock.calls[0][0];
|
||||
const parsedData = JSON.parse(encodedData.split('data: ')[1]);
|
||||
|
||||
expect(parsedData.type).toBe('chat');
|
||||
expect(parsedData.data.choices[0].delta.content).toBe('Hello world');
|
||||
expect(parsedData.data.choices[0].finish_reason).toBe(null);
|
||||
});
|
||||
|
||||
it('should pass through data with choices but no delta.content', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
index: 0,
|
||||
delta: {},
|
||||
logprobs: null,
|
||||
finish_reason: 'stop',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
handler(data);
|
||||
|
||||
expect(mockController.enqueue).toHaveBeenCalledTimes(1);
|
||||
expect(mockEncoder.encode).toHaveBeenCalledWith(
|
||||
expect.stringContaining('"finish_reason":"stop"'),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return early for unrecognized data format', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
// No recognized properties
|
||||
unrecognized: 'property',
|
||||
},
|
||||
};
|
||||
|
||||
handler(data);
|
||||
|
||||
expect(mockController.enqueue).not.toHaveBeenCalled();
|
||||
expect(mockEncoder.encode).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should use custom transform function if provided', () => {
|
||||
const handler = handleStreamData(mockController as any, mockEncoder as any);
|
||||
|
||||
const data = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
original: 'data',
|
||||
},
|
||||
};
|
||||
|
||||
const transformFn = vi.fn().mockReturnValue({
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
delta: {
|
||||
content: 'Transformed content',
|
||||
},
|
||||
logprobs: null,
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
handler(data, transformFn);
|
||||
|
||||
expect(transformFn).toHaveBeenCalledWith(data);
|
||||
expect(mockController.enqueue).toHaveBeenCalledTimes(1);
|
||||
expect(mockEncoder.encode).toHaveBeenCalledWith(expect.stringContaining('Transformed content'));
|
||||
});
|
||||
});
|
194
packages/server/src/lib/__tests__/utils.test.ts
Normal file
194
packages/server/src/lib/__tests__/utils.test.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
import { Utils } from '../utils.ts';
|
||||
|
||||
describe('Utils', () => {
|
||||
describe('getSeason', () => {
|
||||
// Based on the actual behavior from debug tests (months are 0-indexed in JavaScript):
|
||||
// Winter: month < 2 (Jan, Feb) OR month === 2 && day <= 20 (Mar 1-20) OR month === 11 (Dec)
|
||||
// Spring: (month === 2 && day > 20) (Mar 21-31) OR month === 3 || month === 4 (Apr, May) OR (month === 5 && day <= 21) (Jun 1-21)
|
||||
// Summer: (month === 5 && day > 21) (Jun 22-30) OR month === 6 || month === 7 (Jul, Aug) OR (month === 8 && day <= 22) (Sep 1-22)
|
||||
// Autumn: (month === 8 && day > 22) (Sep 23-30) OR month === 9 || month === 10 (Oct, Nov)
|
||||
|
||||
it('should return Winter for dates in winter in Northern Hemisphere', () => {
|
||||
expect(Utils.getSeason('2023-01-15')).toBe('Winter'); // January (month 0)
|
||||
expect(Utils.getSeason('2023-02-15')).toBe('Winter'); // February (month 1)
|
||||
expect(Utils.getSeason('2023-03-20')).toBe('Winter'); // March 20 (month 2)
|
||||
expect(Utils.getSeason('2023-12-15')).toBe('Winter'); // December (month 11)
|
||||
});
|
||||
|
||||
it('should return Spring for dates in spring in Northern Hemisphere', () => {
|
||||
expect(Utils.getSeason('2023-03-25')).toBe('Spring'); // March 25 (month 2)
|
||||
expect(Utils.getSeason('2023-04-15')).toBe('Spring'); // April (month 3)
|
||||
expect(Utils.getSeason('2023-05-15')).toBe('Spring'); // May (month 4)
|
||||
expect(Utils.getSeason('2023-06-21')).toBe('Spring'); // June 21 (month 5)
|
||||
});
|
||||
|
||||
it('should return Summer for dates in summer in Northern Hemisphere', () => {
|
||||
expect(Utils.getSeason('2023-06-23')).toBe('Summer'); // June 23 (month 5)
|
||||
expect(Utils.getSeason('2023-07-15')).toBe('Summer'); // July (month 6)
|
||||
expect(Utils.getSeason('2023-08-15')).toBe('Summer'); // August (month 7)
|
||||
expect(Utils.getSeason('2023-09-22')).toBe('Summer'); // September 22 (month 8)
|
||||
});
|
||||
|
||||
it('should return Autumn for dates in autumn in Northern Hemisphere', () => {
|
||||
expect(Utils.getSeason('2023-09-24')).toBe('Autumn'); // September 24 (month 8)
|
||||
expect(Utils.getSeason('2023-10-15')).toBe('Autumn'); // October (month 9)
|
||||
expect(Utils.getSeason('2023-11-15')).toBe('Autumn'); // November (month 10)
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTimezone', () => {
|
||||
const originalDateTimeFormat = Intl.DateTimeFormat;
|
||||
|
||||
beforeEach(() => {
|
||||
// Mock Intl.DateTimeFormat
|
||||
global.Intl.DateTimeFormat = vi.fn().mockReturnValue({
|
||||
resolvedOptions: vi.fn().mockReturnValue({
|
||||
timeZone: 'America/New_York',
|
||||
}),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original
|
||||
global.Intl.DateTimeFormat = originalDateTimeFormat;
|
||||
});
|
||||
|
||||
it('should return the provided timezone if available', () => {
|
||||
expect(Utils.getTimezone('Europe/London')).toBe('Europe/London');
|
||||
});
|
||||
|
||||
it('should return the system timezone if no timezone is provided', () => {
|
||||
expect(Utils.getTimezone(undefined)).toBe('America/New_York');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCurrentDate', () => {
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(new Date('2023-01-01T12:30:45Z'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should return the current date as an ISO string', () => {
|
||||
expect(Utils.getCurrentDate()).toBe('2023-01-01T12:30:45.000Z');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isAssetUrl', () => {
|
||||
it('should return true for URLs starting with /assets/', () => {
|
||||
expect(Utils.isAssetUrl('https://example.com/assets/image.png')).toBe(true);
|
||||
expect(Utils.isAssetUrl('http://localhost:8080/assets/script.js')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for URLs not starting with /assets/', () => {
|
||||
expect(Utils.isAssetUrl('https://example.com/api/data')).toBe(false);
|
||||
expect(Utils.isAssetUrl('http://localhost:8080/images/logo.png')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('selectEquitably', () => {
|
||||
beforeEach(() => {
|
||||
// Mock Math.random to return predictable values
|
||||
vi.spyOn(Math, 'random').mockReturnValue(0.5);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should select items equitably from multiple sources', () => {
|
||||
const sources = {
|
||||
a: { key1: 'value1', key2: 'value2' },
|
||||
b: { key3: 'value3', key4: 'value4' },
|
||||
c: { key5: 'value5', key6: 'value6' },
|
||||
d: { key7: 'value7', key8: 'value8' },
|
||||
};
|
||||
|
||||
const result = Utils.selectEquitably(sources, 4);
|
||||
|
||||
expect(Object.keys(result).length).toBe(4);
|
||||
// Due to the mocked Math.random, the selection should be deterministic
|
||||
// but we can't predict the exact keys due to the sort, so we just check the count
|
||||
});
|
||||
|
||||
it('should handle itemCount greater than available items', () => {
|
||||
const sources = {
|
||||
a: { key1: 'value1' },
|
||||
b: { key2: 'value2' },
|
||||
c: {},
|
||||
d: {},
|
||||
};
|
||||
|
||||
const result = Utils.selectEquitably(sources, 5);
|
||||
|
||||
expect(Object.keys(result).length).toBe(2);
|
||||
expect(result).toHaveProperty('key1');
|
||||
expect(result).toHaveProperty('key2');
|
||||
});
|
||||
|
||||
it('should handle empty sources', () => {
|
||||
const sources = {
|
||||
a: {},
|
||||
b: {},
|
||||
c: {},
|
||||
d: {},
|
||||
};
|
||||
|
||||
const result = Utils.selectEquitably(sources, 5);
|
||||
|
||||
expect(Object.keys(result).length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('normalizeWithBlanks', () => {
|
||||
it('should insert blank messages to maintain user/assistant alternation', () => {
|
||||
const messages = [
|
||||
{ role: 'user', content: 'Hello' },
|
||||
{ role: 'user', content: 'How are you?' },
|
||||
];
|
||||
|
||||
const result = Utils.normalizeWithBlanks(messages);
|
||||
|
||||
expect(result.length).toBe(3);
|
||||
expect(result[0]).toEqual({ role: 'user', content: 'Hello' });
|
||||
expect(result[1]).toEqual({ role: 'assistant', content: '' });
|
||||
expect(result[2]).toEqual({ role: 'user', content: 'How are you?' });
|
||||
});
|
||||
|
||||
it('should insert blank user message if first message is assistant', () => {
|
||||
const messages = [{ role: 'assistant', content: 'Hello, how can I help?' }];
|
||||
|
||||
const result = Utils.normalizeWithBlanks(messages);
|
||||
|
||||
expect(result.length).toBe(2);
|
||||
expect(result[0]).toEqual({ role: 'user', content: '' });
|
||||
expect(result[1]).toEqual({ role: 'assistant', content: 'Hello, how can I help?' });
|
||||
});
|
||||
|
||||
it('should handle empty array', () => {
|
||||
const messages: any[] = [];
|
||||
|
||||
const result = Utils.normalizeWithBlanks(messages);
|
||||
|
||||
expect(result.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle already alternating messages', () => {
|
||||
const messages = [
|
||||
{ role: 'user', content: 'Hello' },
|
||||
{ role: 'assistant', content: 'Hi there' },
|
||||
{ role: 'user', content: 'How are you?' },
|
||||
];
|
||||
|
||||
const result = Utils.normalizeWithBlanks(messages);
|
||||
|
||||
expect(result.length).toBe(3);
|
||||
expect(result).toEqual(messages);
|
||||
});
|
||||
});
|
||||
});
|
56
packages/server/src/lib/assistant-sdk.ts
Normal file
56
packages/server/src/lib/assistant-sdk.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import few_shots from '../prompts/few_shots.ts';
|
||||
|
||||
import { Utils } from './utils.ts';
|
||||
|
||||
export class AssistantSdk {
|
||||
static getAssistantPrompt(params: {
|
||||
maxTokens?: number;
|
||||
userTimezone?: string;
|
||||
userLocation?: string;
|
||||
}): string {
|
||||
const { maxTokens, userTimezone = 'UTC', userLocation = '' } = params;
|
||||
// console.log('[DEBUG_LOG] few_shots:', JSON.stringify(few_shots));
|
||||
let selectedFewshots = Utils.selectEquitably?.(few_shots);
|
||||
// console.log('[DEBUG_LOG] selectedFewshots after Utils.selectEquitably:', JSON.stringify(selectedFewshots));
|
||||
if (!selectedFewshots) {
|
||||
selectedFewshots = few_shots;
|
||||
// console.log('[DEBUG_LOG] selectedFewshots after fallback:', JSON.stringify(selectedFewshots));
|
||||
}
|
||||
const sdkDate = new Date().toISOString();
|
||||
const [currentDate] = sdkDate.includes('T') ? sdkDate.split('T') : [sdkDate];
|
||||
const now = new Date();
|
||||
const formattedMinutes = String(now.getMinutes()).padStart(2, '0');
|
||||
const currentTime = `${now.getHours()}:${formattedMinutes} ${now.getSeconds()}s`;
|
||||
|
||||
return `# Assistant Knowledge
|
||||
## Current Context
|
||||
### Date: ${currentDate} ${currentTime}
|
||||
### Web Host: open-gsio.seemueller.workers.dev
|
||||
${maxTokens ? `### Max Response Length: ${maxTokens} tokens (maximum)` : ''}
|
||||
### Lexicographical Format: Markdown
|
||||
### User Location: ${userLocation || 'Unknown'}
|
||||
### Timezone: ${userTimezone}
|
||||
## Response Framework
|
||||
1. Use knowledge provided in the current context as the primary source of truth.
|
||||
2. Format all responses in Markdown.
|
||||
3. Attribute external sources with footnotes.
|
||||
## Examples
|
||||
#### Example 0
|
||||
**Human**: What is this?
|
||||
**Assistant**: This is a conversational AI system.
|
||||
---
|
||||
${AssistantSdk.useFewshots(selectedFewshots, 5)}
|
||||
---
|
||||
## Directive
|
||||
Continuously monitor the evolving conversation. Dynamically adapt each response.`;
|
||||
}
|
||||
|
||||
static useFewshots(fewshots: Record<string, string>, limit = 5): string {
|
||||
return Object.entries(fewshots)
|
||||
.slice(0, limit)
|
||||
.map(([q, a], i) => {
|
||||
return `#### Example ${i + 1}\n**Human**: ${q}\n**Assistant**: ${a}`;
|
||||
})
|
||||
.join('\n---\n');
|
||||
}
|
||||
}
|
131
packages/server/src/lib/chat-sdk.ts
Normal file
131
packages/server/src/lib/chat-sdk.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import { ProviderRepository } from '@open-gsio/ai/providers/_ProviderRepository.ts';
|
||||
import type { Instance } from 'mobx-state-tree';
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
import Message from '../models/Message.ts';
|
||||
|
||||
import { AssistantSdk } from './assistant-sdk.ts';
|
||||
|
||||
export class ChatSdk {
|
||||
static async preprocess({ messages }) {
|
||||
// run processing on messages to generate events/context
|
||||
return Message.create({
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
});
|
||||
}
|
||||
|
||||
static async handleChatRequest(
|
||||
request: Request,
|
||||
ctx: {
|
||||
openai: OpenAI;
|
||||
systemPrompt: any;
|
||||
maxTokens: any;
|
||||
env: Env;
|
||||
},
|
||||
) {
|
||||
const streamId = crypto.randomUUID();
|
||||
const { messages, model, conversationId } = await request.json();
|
||||
|
||||
if (!messages?.length) {
|
||||
return new Response('No messages provided', { status: 400 });
|
||||
}
|
||||
|
||||
const preprocessedContext = await ChatSdk.preprocess({
|
||||
messages,
|
||||
});
|
||||
// console.log(ctx.env)
|
||||
// console.log(ctx.env.SERVER_COORDINATOR);
|
||||
|
||||
const objectId = ctx.env.SERVER_COORDINATOR.idFromName('stream-index');
|
||||
const durableObject = ctx.env.SERVER_COORDINATOR.get(objectId);
|
||||
|
||||
await durableObject.saveStreamData(
|
||||
streamId,
|
||||
JSON.stringify({
|
||||
messages,
|
||||
model,
|
||||
conversationId,
|
||||
timestamp: Date.now(),
|
||||
systemPrompt: ctx.systemPrompt,
|
||||
preprocessedContext,
|
||||
}),
|
||||
);
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
streamUrl: `/api/streams/${streamId}`,
|
||||
}),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
static async calculateMaxTokens(
|
||||
messages: any[],
|
||||
ctx: Record<string, any> & {
|
||||
env: Env;
|
||||
maxTokens: number;
|
||||
},
|
||||
) {
|
||||
const objectId = ctx.env.SERVER_COORDINATOR.idFromName('dynamic-token-counter');
|
||||
const durableObject = ctx.env.SERVER_COORDINATOR.get(objectId);
|
||||
return durableObject.dynamicMaxTokens(messages, ctx.maxTokens);
|
||||
}
|
||||
|
||||
static buildAssistantPrompt({ maxTokens }) {
|
||||
return AssistantSdk.getAssistantPrompt({
|
||||
maxTokens,
|
||||
userTimezone: 'UTC',
|
||||
userLocation: 'USA/unknown',
|
||||
});
|
||||
}
|
||||
|
||||
static async buildMessageChain(
|
||||
messages: any[],
|
||||
opts: {
|
||||
systemPrompt: any;
|
||||
assistantPrompt: string;
|
||||
toolResults: Instance<typeof Message>;
|
||||
model: any;
|
||||
env: Env;
|
||||
},
|
||||
) {
|
||||
const modelFamily = await ProviderRepository.getModelFamily(opts.model, opts.env);
|
||||
|
||||
const messagesToSend = [];
|
||||
|
||||
messagesToSend.push(
|
||||
Message.create({
|
||||
role:
|
||||
opts.model.includes('o1') ||
|
||||
opts.model.includes('gemma') ||
|
||||
modelFamily === 'claude' ||
|
||||
modelFamily === 'google'
|
||||
? 'assistant'
|
||||
: 'system',
|
||||
content: opts.systemPrompt.trim(),
|
||||
}),
|
||||
);
|
||||
|
||||
messagesToSend.push(
|
||||
Message.create({
|
||||
role: 'assistant',
|
||||
content: opts.assistantPrompt.trim(),
|
||||
}),
|
||||
);
|
||||
|
||||
messagesToSend.push(
|
||||
...messages
|
||||
.filter((message: any) => message.content?.trim())
|
||||
.map((message: any) => Message.create(message)),
|
||||
);
|
||||
|
||||
return messagesToSend;
|
||||
}
|
||||
}
|
||||
|
||||
export default ChatSdk;
|
93
packages/server/src/lib/handleStreamData.ts
Normal file
93
packages/server/src/lib/handleStreamData.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
interface StreamChoice {
|
||||
index?: number;
|
||||
delta: {
|
||||
content: string;
|
||||
};
|
||||
logprobs: null;
|
||||
finish_reason: string | null;
|
||||
}
|
||||
|
||||
interface StreamResponse {
|
||||
type: string;
|
||||
data: {
|
||||
choices?: StreamChoice[];
|
||||
delta?: {
|
||||
text?: string;
|
||||
};
|
||||
type?: string;
|
||||
content_block?: {
|
||||
type: string;
|
||||
text: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
const handleStreamData = (controller: ReadableStreamDefaultController, encoder: TextEncoder) => {
|
||||
return (data: StreamResponse, transformFn?: (data: StreamResponse) => StreamResponse) => {
|
||||
if (!data?.type || data.type !== 'chat') {
|
||||
return;
|
||||
}
|
||||
|
||||
let transformedData: StreamResponse;
|
||||
|
||||
if (transformFn) {
|
||||
transformedData = transformFn(data);
|
||||
} else {
|
||||
if (data.data.type === 'content_block_start' && data.data.content_block?.type === 'text') {
|
||||
transformedData = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
delta: {
|
||||
content: data.data.content_block.text || '',
|
||||
},
|
||||
logprobs: null,
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
} else if (data.data.delta?.text) {
|
||||
transformedData = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
delta: {
|
||||
content: data.data.delta.text,
|
||||
},
|
||||
logprobs: null,
|
||||
finish_reason: null,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
} else if (data.data.choices?.[0]?.delta?.content) {
|
||||
transformedData = {
|
||||
type: 'chat',
|
||||
data: {
|
||||
choices: [
|
||||
{
|
||||
index: data.data.choices[0].index,
|
||||
delta: {
|
||||
content: data.data.choices[0].delta.content,
|
||||
},
|
||||
logprobs: null,
|
||||
finish_reason: data.data.choices[0].finish_reason,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
} else if (data.data.choices) {
|
||||
transformedData = data;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(transformedData)}\n\n`));
|
||||
};
|
||||
};
|
||||
|
||||
export default handleStreamData;
|
89
packages/server/src/lib/utils.ts
Normal file
89
packages/server/src/lib/utils.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
export class Utils {
|
||||
static getSeason(date: string): string {
|
||||
const hemispheres = {
|
||||
Northern: ['Winter', 'Spring', 'Summer', 'Autumn'],
|
||||
Southern: ['Summer', 'Autumn', 'Winter', 'Spring'],
|
||||
};
|
||||
const d = new Date(date);
|
||||
const month = d.getMonth();
|
||||
const day = d.getDate();
|
||||
const hemisphere = 'Northern';
|
||||
|
||||
if (month < 2 || (month === 2 && day <= 20) || month === 11) return hemispheres[hemisphere][0];
|
||||
if (month < 5 || (month === 5 && day <= 21)) return hemispheres[hemisphere][1];
|
||||
if (month < 8 || (month === 8 && day <= 22)) return hemispheres[hemisphere][2];
|
||||
return hemispheres[hemisphere][3];
|
||||
}
|
||||
static getTimezone(timezone) {
|
||||
if (timezone) {
|
||||
return timezone;
|
||||
}
|
||||
return Intl.DateTimeFormat().resolvedOptions().timeZone;
|
||||
}
|
||||
|
||||
static getCurrentDate() {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
|
||||
static isAssetUrl(url) {
|
||||
const { pathname } = new URL(url);
|
||||
return pathname.startsWith('/assets/');
|
||||
}
|
||||
|
||||
static selectEquitably({ a, b, c, d }, itemCount = 9) {
|
||||
const sources = [a, b, c, d];
|
||||
const result = {};
|
||||
|
||||
let combinedItems = [];
|
||||
sources.forEach((source, index) => {
|
||||
combinedItems.push(...Object.keys(source).map(key => ({ source: index, key })));
|
||||
});
|
||||
|
||||
combinedItems = combinedItems.sort(() => Math.random() - 0.5);
|
||||
|
||||
let selectedCount = 0;
|
||||
while (selectedCount < itemCount && combinedItems.length > 0) {
|
||||
const { source, key } = combinedItems.shift();
|
||||
const sourceObject = sources[source];
|
||||
|
||||
if (!result[key]) {
|
||||
result[key] = sourceObject[key];
|
||||
selectedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static normalizeWithBlanks<T extends NormalizeChatMessage>(msgs: T[]): T[] {
|
||||
const out: T[] = [];
|
||||
|
||||
// In local mode first turn expected to be user.
|
||||
let expected: NormalizeRole = 'user';
|
||||
|
||||
for (const m of msgs) {
|
||||
while (m.role !== expected) {
|
||||
// Insert blanks to match expected sequence user/assistant/user...
|
||||
out.push(makeNormalizeBlank(expected) as T);
|
||||
expected = expected === 'user' ? 'assistant' : 'user';
|
||||
}
|
||||
|
||||
out.push(m);
|
||||
expected = expected === 'user' ? 'assistant' : 'user';
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize module exports
|
||||
export type NormalizeRole = 'user' | 'assistant';
|
||||
|
||||
export interface NormalizeChatMessage extends Record<any, any> {
|
||||
role: NormalizeRole;
|
||||
}
|
||||
|
||||
export const makeNormalizeBlank = (role: NormalizeRole): NormalizeChatMessage => ({
|
||||
role,
|
||||
content: '',
|
||||
});
|
9
packages/server/src/models/ContactRecord.ts
Normal file
9
packages/server/src/models/ContactRecord.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { types } from 'mobx-state-tree';
|
||||
|
||||
export default types.model('ContactRecord', {
|
||||
message: types.string,
|
||||
timestamp: types.string,
|
||||
email: types.string,
|
||||
firstname: types.string,
|
||||
lastname: types.string,
|
||||
});
|
10
packages/server/src/models/FeedbackRecord.ts
Normal file
10
packages/server/src/models/FeedbackRecord.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
// FeedbackRecord.ts
|
||||
import { types } from 'mobx-state-tree';
|
||||
|
||||
const FeedbackRecord = types.model('FeedbackRecord', {
|
||||
feedback: types.string,
|
||||
timestamp: types.string,
|
||||
user: types.optional(types.string, 'Anonymous'),
|
||||
});
|
||||
|
||||
export default FeedbackRecord;
|
18
packages/server/src/models/Message.ts
Normal file
18
packages/server/src/models/Message.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
// Base Message
|
||||
import { type Instance, types } from 'mobx-state-tree';
|
||||
|
||||
export default types
|
||||
.model('Message', {
|
||||
content: types.string,
|
||||
role: types.enumeration(['user', 'assistant', 'system']),
|
||||
})
|
||||
.actions(self => ({
|
||||
setContent(newContent: string) {
|
||||
self.content = newContent;
|
||||
},
|
||||
append(newContent: string) {
|
||||
self.content += newContent;
|
||||
},
|
||||
}));
|
||||
|
||||
export type MessageType = Instance<typeof this>;
|
20
packages/server/src/models/O1Message.ts
Normal file
20
packages/server/src/models/O1Message.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { types } from 'mobx-state-tree';
|
||||
|
||||
export default types
|
||||
.model('O1Message', {
|
||||
role: types.enumeration(['user', 'assistant', 'system']),
|
||||
content: types.array(
|
||||
types.model({
|
||||
type: types.string,
|
||||
text: types.string,
|
||||
}),
|
||||
),
|
||||
})
|
||||
.actions(self => ({
|
||||
setContent(newContent: string, contentType: string = 'text') {
|
||||
self.content = [{ type: contentType, text: newContent }];
|
||||
},
|
||||
append(newContent: string, contentType: string = 'text') {
|
||||
self.content.push({ type: contentType, text: newContent });
|
||||
},
|
||||
}));
|
16
packages/server/src/models/OpenAiMessage.ts
Normal file
16
packages/server/src/models/OpenAiMessage.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
// Models
|
||||
import { types } from 'mobx-state-tree';
|
||||
|
||||
export default types
|
||||
.model('Message', {
|
||||
content: types.string,
|
||||
role: types.enumeration(['user', 'assistant', 'system']),
|
||||
})
|
||||
.actions(self => ({
|
||||
setContent(newContent: string) {
|
||||
self.content = newContent;
|
||||
},
|
||||
append(newContent: string) {
|
||||
self.content += newContent;
|
||||
},
|
||||
}));
|
117
packages/server/src/prompts/few_shots.ts
Normal file
117
packages/server/src/prompts/few_shots.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
/* eslint-disable no-useless-escape */
|
||||
const a = {
|
||||
'Can you search for the latest quantum computing breakthroughs?': "I don't have web access.",
|
||||
};
|
||||
|
||||
const b = {
|
||||
'Calculate the number of unique ways to arrange the letters in the word SUCCESS.': `Alright, let's tackle 'SUCCESS' and see just how unique it can get!
|
||||
### Step 1: Count the Letters
|
||||
The word 'SUCCESS' has 7 letters, but it’s no free-for-all—there are repeats!
|
||||
- S: 3 times
|
||||
- U: 1 time
|
||||
- C: 2 times
|
||||
- E: 1 time
|
||||
### Step 2: The Formula for Permutations with Repetition
|
||||
When letters repeat, we divide to avoid overcounting. The magic formula is:
|
||||
$$
|
||||
\frac{n!}{k_1! \cdot k_2! \cdot \ldots \cdot k_m!}
|
||||
$$
|
||||
Here, (n) is the total number of letters, and (k_i) are the frequencies of each unique letter.
|
||||
### Step 3. Plug in the Values
|
||||
Let’s compute:
|
||||
$$
|
||||
\frac{7!}{3! \cdot 1! \cdot 2! \cdot 1!} = 420
|
||||
$$
|
||||
|
||||
### Step 4: Reveal the Result
|
||||
There are 420 unique arrangements of 'SUCCESS'. Mathematically satisfying, isn’t it? It’s as if every version of success gets its own unique permutation!`,
|
||||
|
||||
'Calculate the number of unique ways to arrange the letters in the word MISSISSIPPI.': `Ready for some permutation wizardry? Let’s decode 'MISSISSIPPI'!
|
||||
### Step 1: Count the Letters
|
||||
'MISSISSIPPI' has 11 letters. But like a social network with cliques, some letters appear in multiples:
|
||||
- M: 1
|
||||
- I: 4
|
||||
- S: 4
|
||||
- P: 2
|
||||
### Step 2: Apply the Permutation Formula for Repetition
|
||||
The formula for such a case is:
|
||||
$$
|
||||
\frac{n!}{k_1! \cdot k_2! \cdot \ldots \cdot k_m!}
|
||||
$$
|
||||
### Step 3: Plug in the Values
|
||||
Here’s where we let the math do its magic:
|
||||
$$
|
||||
\frac{11!}{1! \cdot 4! \cdot 4! \cdot 2!}
|
||||
$$
|
||||
### Step 4: Simplify Step-by-Step
|
||||
Let’s break it down:
|
||||
- (11! = 39,916,800)
|
||||
- (4! = 24)
|
||||
- (2! = 2)
|
||||
#### Substituting these values:
|
||||
$$
|
||||
\frac{39,916,800}{1 \cdot 24 \cdot 24 \cdot 2} = 34,650
|
||||
$$
|
||||
### Step 5: The Grand Finale
|
||||
There are 34,650 unique arrangements of 'MISSISSIPPI'.`,
|
||||
};
|
||||
|
||||
const c = {
|
||||
'Render a Greek statue with warm marble tones and realistic proportions.':
|
||||
"I don't have the ability to generate images right now. ",
|
||||
};
|
||||
|
||||
const d = {
|
||||
'Demonstrate markdown formatted text.': `
|
||||
## Headers
|
||||
~~~markdown
|
||||
# Large Header
|
||||
## Medium Header
|
||||
### Small Header
|
||||
~~~
|
||||
## Ordered Lists
|
||||
1. First Item
|
||||
2. Second Item
|
||||
1. Subitem 1
|
||||
2. Subitem 2
|
||||
3. Third Item
|
||||
## Unordered Lists
|
||||
- First Item
|
||||
- Second Item
|
||||
- Subitem 1
|
||||
- Subitem 2
|
||||
## Links
|
||||
[Visit OpenAI](https://openai.com/)
|
||||
## Images
|
||||

|
||||
## Inline Code
|
||||
\`console.log('Hello, Markdown!')\`
|
||||
## Code Blocks
|
||||
~~~javascript
|
||||
console.log(marked.parse('A Description List:\\n'
|
||||
+ ': Topic 1 : Description 1\\n'
|
||||
+ ': **Topic 2** : *Description 2*'));
|
||||
~~~
|
||||
## Tables
|
||||
| Name | Value |
|
||||
|---------|-------|
|
||||
| Item A | 10 |
|
||||
| Item B | 20 |
|
||||
## Blockquotes
|
||||
> Markdown makes writing beautiful.
|
||||
> - Markdown Fan
|
||||
## Horizontal Rule
|
||||
---
|
||||
## Font: Bold and Italic
|
||||
**Bold Text**
|
||||
*Italic Text*
|
||||
## Font: Strikethrough
|
||||
~~Struck-through text~~
|
||||
## Math
|
||||
~~~markdown
|
||||
$$
|
||||
c = \\\\pm\\\\sqrt{a^2 + b^2}
|
||||
$$`,
|
||||
};
|
||||
|
||||
export default { a, b, c, d };
|
5
packages/server/src/router/index.ts
Normal file
5
packages/server/src/router/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { createRouter } from './router.ts';
|
||||
|
||||
export default {
|
||||
Router: createRouter,
|
||||
};
|
84
packages/server/src/router/router.ts
Normal file
84
packages/server/src/router/router.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { Router, withParams } from 'itty-router';
|
||||
|
||||
import { createRequestContext } from '../../RequestContext.ts';
|
||||
|
||||
export function createRouter() {
|
||||
return (
|
||||
Router()
|
||||
.get('/assets/*', (r, e, c) => {
|
||||
const { assetService } = createRequestContext(e, c);
|
||||
return assetService.handleStaticAssets(r, e, c);
|
||||
})
|
||||
|
||||
.post('/api/contact', (r, e, c) => {
|
||||
const { contactService } = createRequestContext(e, c);
|
||||
return contactService.handleContact(r);
|
||||
})
|
||||
|
||||
.post('/api/chat', (r, e, c) => {
|
||||
const { chatService } = createRequestContext(e, c);
|
||||
return chatService.handleChatRequest(r);
|
||||
})
|
||||
|
||||
.get('/api/streams/:streamId', withParams, async ({ streamId }, env, ctx) => {
|
||||
const { chatService } = createRequestContext(env, ctx);
|
||||
return chatService.handleSseStream(streamId); // Handles SSE for streamId
|
||||
})
|
||||
|
||||
.get('/api/models', async (req, env, ctx) => {
|
||||
const { chatService } = createRequestContext(env, ctx);
|
||||
return chatService.getSupportedModels();
|
||||
})
|
||||
|
||||
.post('/api/feedback', async (r, e, c) => {
|
||||
const { feedbackService } = createRequestContext(e, c);
|
||||
return feedbackService.handleFeedback(r);
|
||||
})
|
||||
|
||||
.post('/api/tx', async (r, e, c) => {
|
||||
const { transactionService } = createRequestContext(e, c);
|
||||
return transactionService.handleTransact(r);
|
||||
})
|
||||
|
||||
// used for file handling, can be enabled but is not fully implemented in this fork.
|
||||
// .post('/api/documents', async (r, e, c) => {
|
||||
// const {documentService} = createServerContext(e, c);
|
||||
// return documentService.handlePutDocument(r)
|
||||
// })
|
||||
//
|
||||
// .get('/api/documents', async (r, e, c) => {
|
||||
// const {documentService} = createServerContext(e, c);
|
||||
// return documentService.handleGetDocument(r)
|
||||
// })
|
||||
|
||||
.get('/api/metrics*', async (r, e, c) => {
|
||||
const { metricsService } = createRequestContext(e, c);
|
||||
return metricsService.handleMetricsRequest(r);
|
||||
})
|
||||
|
||||
.post('/api/metrics*', async (r, e, c) => {
|
||||
const { metricsService } = createRequestContext(e, c);
|
||||
return metricsService.handleMetricsRequest(r);
|
||||
})
|
||||
|
||||
// renders the app
|
||||
.all('^(?!/api/)(?!/assets/).*$', async (r, e, c) => {
|
||||
const { assetService } = createRequestContext(e, c);
|
||||
|
||||
// First attempt to serve pre-rendered HTML
|
||||
const preRenderedHtml = await assetService.handleStaticAssets(r, e);
|
||||
if (preRenderedHtml !== null) {
|
||||
return preRenderedHtml;
|
||||
}
|
||||
|
||||
// If no pre-rendered HTML, attempt SSR
|
||||
const ssrResponse = await assetService.handleSsr(r.url, r.headers, e);
|
||||
if (ssrResponse !== null) {
|
||||
return ssrResponse;
|
||||
}
|
||||
|
||||
// Finally, proxy to static assets if nothing else matched
|
||||
return assetService.handleStaticAssets(r, e);
|
||||
})
|
||||
);
|
||||
}
|
9
packages/server/src/server/build.ts
Normal file
9
packages/server/src/server/build.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
// handles builds the server into js
|
||||
await Bun.build({
|
||||
entrypoints: ['./server.ts'],
|
||||
outdir: './build',
|
||||
minify: true,
|
||||
target: 'node',
|
||||
splitting: true,
|
||||
throw: true,
|
||||
});
|
98
packages/server/src/server/server.ts
Normal file
98
packages/server/src/server/server.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { readdir } from 'node:fs/promises';
|
||||
|
||||
import ServerCoordinator from '@open-gsio/durable-objects/src/ServerCoordinatorBun.ts';
|
||||
import { config } from 'dotenv';
|
||||
import type { RequestLike } from 'itty-router';
|
||||
|
||||
import Router from '../router';
|
||||
import { BunSqliteKVNamespace } from '../storage/BunSqliteKVNamespace.ts';
|
||||
|
||||
const router = Router.Router();
|
||||
|
||||
config({
|
||||
path: '.env',
|
||||
debug: true,
|
||||
// defaults: {
|
||||
// EVENTSOURCE_HOST: "https://eventsource.seemueller.io",
|
||||
// }
|
||||
});
|
||||
|
||||
export default {
|
||||
port: 3003,
|
||||
fetch: async (request: RequestLike, env: { [key: string]: any }, ctx: any) => {
|
||||
// console.log("[trace] request: ", request.method, request.url, "headers: ", request.headers.get("referer"), "body: ", request.body, "env: ", env, "ctx: ", ctx, "")
|
||||
|
||||
env['SERVER_COORDINATOR'] = ServerCoordinator;
|
||||
env['ASSETS'] = assetHandler.ASSETS;
|
||||
env['EVENTSOURCE_HOST'] = process.env.EVENTSOURCE_HOST;
|
||||
env['GROQ_API_KEY'] = process.env.GROQ_API_KEY;
|
||||
env['ANTHROPIC_API_KEY'] = process.env.ANTHROPIC_API_KEY;
|
||||
env['FIREWORKS_API_KEY'] = process.env.FIREWORKS_API_KEY;
|
||||
env['XAI_API_KEY'] = process.env.XAI_API_KEY;
|
||||
env['CEREBRAS_API_KEY'] = process.env.CEREBRAS_API_KEY;
|
||||
env['CLOUDFLARE_API_KEY'] = process.env.CLOUDFLARE_API_KEY;
|
||||
env['CLOUDFLARE_ACCOUNT_ID'] = process.env.CLOUDFLARE_ACCOUNT_ID;
|
||||
env['MLX_API_KEY'] = process.env.MLX_API_KEY;
|
||||
env['OLLAMA_API_KEY'] = process.env.OLLAMA_API_KEY;
|
||||
env['KV_STORAGE'] = new BunSqliteKVNamespace({ namespace: 'open-gsio' });
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timeout = new Promise((_, reject) =>
|
||||
setTimeout(() => {
|
||||
controller.abort();
|
||||
reject(new Error('Request timeout after 5s'));
|
||||
}, 5000),
|
||||
);
|
||||
|
||||
return await Promise.race([router.fetch(request, env, ctx), timeout]);
|
||||
} catch (e) {
|
||||
console.error('Error handling request:', e);
|
||||
return new Response('Server Error', { status: 500 });
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
export const assetHandler = {
|
||||
ASSETS: {
|
||||
/**
|
||||
* Fetches the requested static asset from local dist
|
||||
*
|
||||
* @param {Request} request - The incoming Fetch API Request object.
|
||||
* @returns {Promise<Response>} A Promise that resolves with the Response for the requested asset,
|
||||
* or a 404 Response if the asset is not found or an error occurs.
|
||||
*/
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
// Serialize incoming request URL
|
||||
const originalUrl = new URL(request.url);
|
||||
const url = new URL(request.url);
|
||||
|
||||
// List all files in the public directory
|
||||
const PUBLIC_DIR = new URL('../client/public/', import.meta.url).pathname;
|
||||
const publicFiles = await readdir(PUBLIC_DIR, { recursive: true });
|
||||
|
||||
// Get the filename from pathname and remove any path traversal attempts
|
||||
const filename = url.pathname.split('/').pop()?.replace(/\.\./g, '') || '';
|
||||
|
||||
const isStatic = publicFiles.some(file => file === filename);
|
||||
|
||||
if (url.pathname === '/') {
|
||||
url.pathname = '/index.html';
|
||||
} else if (isStatic && !url.pathname.startsWith('/static')) {
|
||||
// leave it alone
|
||||
} else if (isStatic) {
|
||||
url.pathname = `/static${url.pathname}`;
|
||||
}
|
||||
|
||||
const dist = new URL('../client/dist/client', import.meta.url).pathname;
|
||||
|
||||
try {
|
||||
return new Response(Bun.file(`${dist}${url.pathname}`));
|
||||
} catch (error) {
|
||||
// Log the error with the original requested path
|
||||
console.error(`Error reading asset from path ${originalUrl.pathname}:`, error);
|
||||
return new Response(null, { status: 404 });
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
48
packages/server/src/services/AssetService.ts
Normal file
48
packages/server/src/services/AssetService.ts
Normal file
@@ -0,0 +1,48 @@
import renderPage from '@open-gsio/client/server';
import { types } from 'mobx-state-tree';

export default types
  .model('StaticAssetStore', {})
  .volatile(self => ({
    env: {} as Env,
    ctx: {} as ExecutionContext,
  }))
  .actions(self => ({
    setEnv(env: Env) {
      self.env = env;
    },
    setCtx(ctx: ExecutionContext) {
      self.ctx = ctx;
    },
    async handleSsr(url: string, headers: Headers, env: Vike.PageContext.env) {
      const pageContextInit = {
        urlOriginal: url,
        headersOriginal: headers,
        fetch: (...args: Parameters<typeof fetch>) => fetch(...args),
        env,
      };

      const pageContext = await renderPage(pageContextInit);
      const { httpResponse } = pageContext;

      if (!httpResponse) {
        return null;
      } else {
        const { statusCode: status, headers: responseHeaders } = httpResponse;

        // Create a new Headers object and remove Content-Length for streaming.
        const newHeaders = new Headers(responseHeaders);
        newHeaders.delete('Content-Length');

        return new Response(httpResponse.pipe, { headers: newHeaders, status });
      }
    },
    async handleStaticAssets(request: Request, env) {
      try {
        return await env.ASSETS.fetch(request);
      } catch (error) {
        console.error('Error serving static asset:', error);
        return new Response('Asset not found', { status: 404 });
      }
    },
  }));
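As a rough usage sketch (not part of this commit), a router handler could try SSR first and fall back to the static-asset path. The wiring, and the way `env` and `request` reach the handler, are assumptions:

```ts
import AssetService from './AssetService.ts';

// Hypothetical handler: render a page via SSR when one matches, otherwise serve a static asset.
async function handleRequest(request: Request, env: Env): Promise<Response> {
  const assets = AssetService.create();
  assets.setEnv(env);

  const ssr = await assets.handleSsr(request.url, request.headers, env);
  return ssr ?? (await assets.handleStaticAssets(request, env));
}
```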
443
packages/server/src/services/ChatService.ts
Normal file
@@ -0,0 +1,443 @@
/* eslint-disable no-irregular-whitespace */
import {
  CerebrasChatProvider,
  CerebrasSdk,
  ClaudeChatSdk,
  CloudflareAISdk,
  FireworksAiChatSdk,
  GroqChatSdk,
  MlxOmniChatSdk,
  OllamaChatSdk,
  XaiChatSdk,
} from '@open-gsio/ai';
import { GoogleChatSdk } from '@open-gsio/ai/providers/google.ts';
import { OpenAiChatSdk } from '@open-gsio/ai/providers/openai.ts';
import { flow, getSnapshot, types } from 'mobx-state-tree';
import OpenAI from 'openai';

import { ProviderRepository } from '../../../ai/providers/_ProviderRepository.ts';
import ChatSdk from '../lib/chat-sdk.ts';
import handleStreamData from '../lib/handleStreamData.ts';
import Message from '../models/Message.ts';
import O1Message from '../models/O1Message.ts';

export interface StreamParams {
  env: Env;
  openai: OpenAI;
  messages: any[];
  model: string;
  systemPrompt: string;
  preprocessedContext: any;
  maxTokens: number;
}

const activeStreamType = types.model({
  name: types.optional(types.string, ''),
  maxTokens: types.optional(types.number, 0),
  systemPrompt: types.optional(types.string, ''),
  model: types.optional(types.string, ''),
  messages: types.optional(types.array(types.frozen()), []),
});

const activeStreamsMap = types.map(activeStreamType);

const ChatService = types
  .model('ChatService', {
    openAIApiKey: types.optional(types.string, ''),
    openAIBaseURL: types.optional(types.string, ''),
    activeStreams: types.optional(activeStreamsMap, {}),
    maxTokens: types.number,
    systemPrompt: types.string,
  })
  .volatile(self => ({
    openai: {} as OpenAI,
    env: {} as Env,
  }))
  .actions(self => {
    // Helper functions
    const createMessageInstance = (message: any) => {
      if (typeof message.content === 'string') {
        return Message.create({
          role: message.role,
          content: message.content,
        });
      }
      if (Array.isArray(message.content)) {
        const m = O1Message.create({
          role: message.role,
          content: message.content.map(item => ({
            type: item.type,
            text: item.text,
          })),
        });
        return m;
      }
      throw new Error('Unsupported message format');
    };

    const createStreamParams = async (
      streamConfig: any,
      dynamicContext: any,
      durableObject: any,
    ): Promise<StreamParams> => {
      return {
        env: self.env,
        openai: self.openai,
        messages: streamConfig.messages.map(createMessageInstance),
        model: streamConfig.model,
        systemPrompt: streamConfig.systemPrompt,
        preprocessedContext: getSnapshot(dynamicContext),
        maxTokens: await durableObject.dynamicMaxTokens(streamConfig.messages, 2000),
      };
    };

    const modelHandlers = {
      openai: (params: StreamParams, dataHandler: (data: any) => any) =>
        OpenAiChatSdk.handleOpenAiStream(params, dataHandler),
      groq: (params: StreamParams, dataHandler: (data: any) => any) =>
        GroqChatSdk.handleGroqStream(params, dataHandler),
      claude: (params: StreamParams, dataHandler: (data: any) => any) =>
        ClaudeChatSdk.handleClaudeStream(params, dataHandler),
      fireworks: (params: StreamParams, dataHandler: (data: any) => any) =>
        FireworksAiChatSdk.handleFireworksStream(params, dataHandler),
      google: (params: StreamParams, dataHandler: (data: any) => any) =>
        GoogleChatSdk.handleGoogleStream(params, dataHandler),
      xai: (params: StreamParams, dataHandler: (data: any) => any) =>
        XaiChatSdk.handleXaiStream(params, dataHandler),
      cerebras: (params: StreamParams, dataHandler: (data: any) => any) =>
        CerebrasSdk.handleCerebrasStream(params, dataHandler),
      cloudflareAI: (params: StreamParams, dataHandler: (data: any) => any) =>
        CloudflareAISdk.handleCloudflareAIStream(params, dataHandler),
      ollama: (params: StreamParams, dataHandler: (data: any) => any) =>
        OllamaChatSdk.handleOllamaStream(params, dataHandler),
      mlx: (params: StreamParams, dataHandler: (data: any) => any) =>
        MlxOmniChatSdk.handleMlxOmniStream(params, dataHandler),
    };

    return {
      getSupportedModels: flow(function* (): Generator<Promise<unknown>, Response, unknown> {
        // ----- Helpers ----------------------------------------------------------
        const logger = console;

        const useCache = true;

        if (useCache) {
          // ----- 1. Try cached value ---------------------------------------------
          try {
            const cached = yield self.env.KV_STORAGE.get('supportedModels');
            if (cached) {
              const parsed = JSON.parse(cached as string);
              if (Array.isArray(parsed) && parsed.length > 0) {
                logger.info('Cache hit – returning supportedModels from KV');
                return new Response(JSON.stringify(parsed), { status: 200 });
              }
              logger.warn('Cache entry malformed – refreshing');
              throw new Error('Malformed cache entry');
            }
          } catch (err) {
            logger.warn('Error reading/parsing supportedModels cache', err);
          }
        }

        // ----- 2. Build fresh list ---------------------------------------------
        const providerRepo = new ProviderRepository(self.env);
        const providers = providerRepo.getProviders();

        const providerModels = new Map<string, any[]>();
        const modelMeta = new Map<string, any>();

        for (const provider of providers) {
          if (!provider.key) continue;

          logger.info(`Fetching models from «${provider.endpoint}»`);

          const openai = new OpenAI({ apiKey: provider.key, baseURL: provider.endpoint });

          // 2‑a. List models
          try {
            const listResp = yield openai.models.list(); // <‑‑ async
            const models = 'data' in listResp ? listResp.data : listResp;
            providerModels.set(provider.name, models);

            // 2‑b. Retrieve metadata
            for (const mdl of models) {
              try {
                const meta = yield openai.models.retrieve(mdl.id); // <‑‑ async
                modelMeta.set(mdl.id, { ...mdl, ...meta });
              } catch (err) {
                // logger.error(`Metadata fetch failed for ${mdl.id}`, err);
                modelMeta.set(mdl.id, { provider: provider.name, mdl });
              }
            }
          } catch (err) {
            logger.error(`Model list failed for provider «${provider.name}»`, err);
          }
        }

        // ----- 3. Merge results -------------------------------------------------
        const resultMap = new Map<string, any>();
        for (const [provName, models] of providerModels) {
          for (const mdl of models) {
            resultMap.set(mdl.id, {
              id: mdl.id,
              provider: provName,
              ...(modelMeta.get(mdl.id) ?? mdl),
            });
          }
        }
        const resultArr = Array.from(resultMap.values());

        // ----- 4. Cache fresh list ---------------------------------------------
        try {
          yield self.env.KV_STORAGE.put(
            'supportedModels',
            JSON.stringify(resultArr),
            { expirationTtl: 60 * 60 * 24 }, // 24
          );
          logger.info('supportedModels cache refreshed');
        } catch (err) {
          logger.error('KV put failed for supportedModels', err);
        }

        // ----- 5. Return --------------------------------------------------------
        return new Response(JSON.stringify(resultArr), { status: 200 });
      }),
      setActiveStream(streamId: string, stream: any) {
        const validStream = {
          name: stream?.name || 'Unnamed Stream',
          maxTokens: stream?.maxTokens || 0,
          systemPrompt: stream?.systemPrompt || '',
          model: stream?.model || '',
          messages: stream?.messages || [],
        };

        self.activeStreams.set(streamId, validStream);
      },

      removeActiveStream(streamId: string) {
        self.activeStreams.delete(streamId);
      },
      setEnv(env: Env) {
        self.env = env;

        if (env.OPENAI_API_ENDPOINT && env.OPENAI_API_ENDPOINT.includes('localhost')) {
          self.openai = new OpenAI({
            apiKey: self.env.OPENAI_API_KEY,
            baseURL: self.env.OPENAI_API_ENDPOINT,
          });
        } else {
          self.openai = new OpenAI({
            apiKey: self.openAIApiKey,
            baseURL: self.openAIBaseURL,
          });
        }
      },

      handleChatRequest: async (request: Request) => {
        return ChatSdk.handleChatRequest(request, {
          openai: self.openai,
          env: self.env,
          systemPrompt: self.systemPrompt,
          maxTokens: self.maxTokens,
        });
      },

      async runModelHandler(params: {
        streamConfig: any;
        streamParams: any;
        controller: ReadableStreamDefaultController;
        encoder: TextEncoder;
        streamId: string;
      }) {
        const { streamConfig, streamParams, controller, encoder, streamId } = params;

        const modelFamily = await ProviderRepository.getModelFamily(streamConfig.model, self.env);

        const useModelHandler = () => {
          return modelHandlers[modelFamily];
        };

        const handler = useModelHandler();

        if (handler) {
          try {
            await handler(streamParams, handleStreamData(controller, encoder));
          } catch (error) {
            const message = error.message.toLowerCase();

            if (
              message.includes('413 ') ||
              message.includes('maximum') ||
              message.includes('too long') ||
              message.includes('too large')
            ) {
              throw new ClientError(
                `Error! Content length exceeds limits. Try shortening your message or editing an earlier message.`,
                413,
                {
                  model: streamConfig.model,
                  maxTokens: streamParams.maxTokens,
                },
              );
            }
            if (message.includes('429 ')) {
              throw new ClientError(
                `Error! Rate limit exceeded. Wait a few minutes before trying again.`,
                429,
                {
                  model: streamConfig.model,
                  maxTokens: streamParams.maxTokens,
                },
              );
            }
            if (message.includes('404')) {
              throw new ClientError(`Something went wrong, try again.`, 413, {});
            }
            throw error;
          }
        }
      },

      createSseReadableStream(params: {
        streamId: string;
        streamConfig: any;
        savedStreamConfig: string;
        durableObject: any;
      }) {
        const { streamId, streamConfig, savedStreamConfig, durableObject } = params;

        return new ReadableStream({
          async start(controller) {
            const encoder = new TextEncoder();

            try {
              const dynamicContext = Message.create(streamConfig.preprocessedContext);

              // Process the stream data using the appropriate handler
              const streamParams = await createStreamParams(
                streamConfig,
                dynamicContext,
                durableObject,
              );

              await self.runModelHandler({
                streamConfig,
                streamParams,
                controller,
                encoder,
                streamId,
              });
            } catch (error) {
              console.error(`chatService::handleSseStream::${streamId}::Error`, error);

              if (error instanceof ClientError) {
                controller.enqueue(
                  encoder.encode(
                    `data: ${JSON.stringify({ type: 'error', error: error.message })}\n\n`,
                  ),
                );
              } else {
                controller.enqueue(
                  encoder.encode(
                    `data: ${JSON.stringify({
                      type: 'error',
                      error: 'Server error',
                    })}\n\n`,
                  ),
                );
              }
              controller.close();
            } finally {
              try {
                controller.close();
              } catch (_) {
                // Ignore errors when closing the controller, as it might already be closed
              }
            }
          },
        });
      },

      handleSseStream: flow(function* (
        streamId: string,
      ): Generator<Promise<string>, Response, unknown> {
        // Check if a stream is already active for this ID
        if (self.activeStreams.has(streamId)) {
          return new Response('Stream already active', { status: 409 });
        }

        // Retrieve the stream configuration from the durable object
        const objectId = self.env.SERVER_COORDINATOR.idFromName('stream-index');
        const durableObject = self.env.SERVER_COORDINATOR.get(objectId);
        const savedStreamConfig = yield durableObject.getStreamData(streamId);

        if (!savedStreamConfig) {
          return new Response('Stream not found', { status: 404 });
        }

        const streamConfig = JSON.parse(savedStreamConfig);

        const stream = self.createSseReadableStream({
          streamId,
          streamConfig,
          savedStreamConfig,
          durableObject,
        });

        // Use `tee()` to create two streams: one for processing and one for the response
        const [processingStream, responseStream] = stream.tee();

        self.setActiveStream(streamId, {
          ...streamConfig,
        });

        processingStream.pipeTo(
          new WritableStream({
            close() {
              self.removeActiveStream(streamId);
            },
          }),
        );

        // Return the second stream as the response
        return new Response(responseStream, {
          headers: {
            'Content-Type': 'text/event-stream',
            'Cache-Control': 'no-cache',
            Connection: 'keep-alive',
          },
        });
      }),
    };
  });

/**
 * ClientError
 * A custom construct for sending client-friendly errors via the controller in a structured and controlled manner.
 */
export class ClientError extends Error {
  public statusCode: number;
  public details: Record<string, any>;

  constructor(message: string, statusCode: number, details: Record<string, any> = {}) {
    super(message);
    this.name = 'ClientError';
    this.statusCode = statusCode;
    this.details = details;
    Object.setPrototypeOf(this, ClientError.prototype);
  }

  /**
   * Formats the error for SSE-compatible data transmission.
   */
  public formatForSSE(): string {
    return JSON.stringify({
      type: 'error',
      message: this.message,
      details: this.details,
      statusCode: this.statusCode,
    });
  }
}

export default ChatService;
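A quick illustration of how `formatForSSE()` yields a frame an `EventSource` client can parse. This is a standalone sketch, not taken from the diff; the import path is an assumption:

```ts
import { ClientError } from './ChatService.ts';

const err = new ClientError('Rate limit exceeded', 429, { model: 'example-model' });
// An SSE data frame is the JSON payload prefixed with `data: ` and terminated by a blank line.
const frame = `data: ${err.formatForSSE()}\n\n`;
// In the stream above this would be enqueued as: controller.enqueue(new TextEncoder().encode(frame));
console.log(frame);
```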
53
packages/server/src/services/ContactService.ts
Normal file
@@ -0,0 +1,53 @@
// ContactService.ts
import { types, flow, getSnapshot } from 'mobx-state-tree';

import ContactRecord from '../models/ContactRecord.ts';

export default types
  .model('ContactStore', {})
  .volatile(self => ({
    env: {} as Env,
    ctx: {} as ExecutionContext,
  }))
  .actions(self => ({
    setEnv(env: Env) {
      self.env = env;
    },
    setCtx(ctx: ExecutionContext) {
      self.ctx = ctx;
    },
    handleContact: flow(function* (request: Request) {
      try {
        const { markdown: message, email, firstname, lastname } = yield request.json();
        const contactRecord = ContactRecord.create({
          message,
          timestamp: new Date().toISOString(),
          email,
          firstname,
          lastname,
        });
        const contactId = crypto.randomUUID();
        yield self.env.KV_STORAGE.put(
          `contact:${contactId}`,
          JSON.stringify(getSnapshot(contactRecord)),
        );

        yield self.env.EMAIL_SERVICE.sendMail({
          to: 'geoff@seemueller.io',
          plaintextMessage: `WEBSITE CONTACT FORM SUBMISSION
${firstname} ${lastname}
${email}
${message}`,
        });

        return new Response('Contact record saved successfully', {
          status: 200,
        });
      } catch (error) {
        console.error('Error processing contact request:', error);
        return new Response('Failed to process contact request', {
          status: 500,
        });
      }
    }),
  }));
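The request body `handleContact` expects, shown as a hypothetical client call. The `/api/contact` route is an assumption; the field names match the destructuring above:

```ts
await fetch('/api/contact', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    markdown: 'Hello from the contact form', // stored as `message` in KV
    email: 'jane@example.com',
    firstname: 'Jane',
    lastname: 'Doe',
  }),
});
```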
54
packages/server/src/services/FeedbackService.ts
Normal file
@@ -0,0 +1,54 @@
import { types, flow, getSnapshot } from 'mobx-state-tree';

import FeedbackRecord from '../models/FeedbackRecord.ts';

export default types
  .model('FeedbackStore', {})
  .volatile(self => ({
    env: {} as Env,
    ctx: {} as ExecutionContext,
  }))
  .actions(self => ({
    setEnv(env: Env) {
      self.env = env;
    },
    setCtx(ctx: ExecutionContext) {
      self.ctx = ctx;
    },
    handleFeedback: flow(function* (request: Request) {
      try {
        const {
          feedback,
          timestamp = new Date().toISOString(),
          user = 'Anonymous',
        } = yield request.json();

        const feedbackRecord = FeedbackRecord.create({
          feedback,
          timestamp,
          user,
        });

        const feedbackId = crypto.randomUUID();
        yield self.env.KV_STORAGE.put(
          `feedback:${feedbackId}`,
          JSON.stringify(getSnapshot(feedbackRecord)),
        );

        yield self.env.EMAIL_SERVICE.sendMail({
          to: 'geoff@seemueller.io',
          plaintextMessage: `NEW FEEDBACK SUBMISSION
User: ${user}
Feedback: ${feedback}
Timestamp: ${timestamp}`,
        });

        return new Response('Feedback saved successfully', { status: 200 });
      } catch (error) {
        console.error('Error processing feedback request:', error);
        return new Response('Failed to process feedback request', {
          status: 500,
        });
      }
    }),
  }));
54
packages/server/src/services/MetricsService.ts
Normal file
@@ -0,0 +1,54 @@
import { types, flow } from 'mobx-state-tree';

const MetricsService = types
  .model('MetricsService', {
    isCollectingMetrics: types.optional(types.boolean, true),
  })
  .volatile(self => ({
    env: {} as Env,
    ctx: {} as ExecutionContext,
  }))
  .actions(self => ({
    setEnv(env: Env) {
      self.env = env;
    },
    setCtx(ctx: ExecutionContext) {
      self.ctx = ctx;
    },
    handleMetricsRequest: flow(function* (request: Request) {
      const url = new URL(request.url);
      let proxyUrl = '';
      if (self.env.METRICS_HOST) {
        proxyUrl = new URL(`${self.env.METRICS_HOST}${url.pathname}${url.search}`).toString();
      }

      if (proxyUrl) {
        try {
          const response = yield fetch(proxyUrl, {
            method: request.method,
            headers: request.headers,
            body: ['GET', 'HEAD'].includes(request.method) ? null : request.body,
            redirect: 'follow',
          });

          return response;
        } catch (error) {
          console.error('Failed to proxy metrics request:', error);
          return new Response('metrics misconfigured', { status: 200 });
        }
      } else {
        const event = {
          method: request.method,
          headers: request.headers,
          body: ['GET', 'HEAD'].includes(request.method) ? null : request.body,
        };
        if (self.env?.KV_STORAGE?.put) {
          self.env.KV_STORAGE.put(`metrics_events::${crypto.randomUUID()}`, JSON.stringify(event));
        } else {
          console.log('Detected metrics misconfiguration...not storing');
        }
      }
    }),
  }));

export default MetricsService;
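A small sketch of the fallback branch: with `METRICS_HOST` unset, events are written to KV under a random `metrics_events::` key. The stubbed env and URL here are purely illustrative:

```ts
import MetricsService from './MetricsService.ts';

const metrics = MetricsService.create();
// Minimal stub standing in for the real bindings; only KV_STORAGE.put is exercised.
metrics.setEnv({ KV_STORAGE: { put: async () => undefined } } as unknown as Env);
await metrics.handleMetricsRequest(new Request('https://example.com/events/collect'));
```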
93
packages/server/src/services/TransactionService.ts
Normal file
@@ -0,0 +1,93 @@
import { types } from 'mobx-state-tree';

const TransactionService = types
  .model('TransactionService', {})
  .volatile(self => ({
    env: {} as Env,
    ctx: {} as ExecutionContext,
  }))
  .actions(self => ({
    setEnv(env: Env) {
      self.env = env;
    },
    setCtx(ctx: ExecutionContext) {
      self.ctx = ctx;
    },

    routeAction: async function (action: string, requestBody: any) {
      const actionHandlers: Record<string, (data: any) => Promise<any>> = {
        PREPARE_TX: self.handlePrepareTransaction,
      };

      const handler = actionHandlers[action];
      if (!handler) {
        throw new Error(`No handler for action: ${action}`);
      }

      return await handler(requestBody);
    },

    handlePrepareTransaction: async function (data: []) {
      const [donerId, currency, amount] = data;
      const CreateWalletEndpoints = {
        bitcoin: '/api/btc/create',
        ethereum: '/api/eth/create',
        dogecoin: '/api/doge/create',
      };

      const walletRequest = await fetch(
        `https://wallets.seemueller.io${CreateWalletEndpoints[currency]}`,
      );
      const walletResponse = await walletRequest.text();
      // console.log({ walletRequest: walletResponse });
      const [address, privateKey, publicKey, phrase] = JSON.parse(walletResponse);

      const txKey = crypto.randomUUID();

      const txRecord = {
        txKey,
        donerId,
        currency,
        amount,
        depositAddress: address,
        privateKey,
        publicKey,
        phrase,
      };

      // console.log({ txRecord });

      const key = `transactions::prepared::${txKey}`;

      await self.env.KV_STORAGE.put(key, JSON.stringify(txRecord));
      // console.log(`PREPARED TRANSACTION ${key}`);

      return {
        depositAddress: address,
        txKey: txKey,
      };
    },

    handleTransact: async function (request: Request) {
      try {
        const raw = await request.text();
        // console.log({ raw });
        const [action, ...payload] = raw.split(',');

        const response = await self.routeAction(action, payload);

        return new Response(JSON.stringify(response), {
          status: 200,
          headers: { 'Content-Type': 'application/json' },
        });
      } catch (error) {
        console.error('Error handling transaction:', error);
        return new Response(JSON.stringify({ error: 'Transaction failed' }), {
          status: 500,
          headers: { 'Content-Type': 'application/json' },
        });
      }
    },
  }));

export default TransactionService;
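`handleTransact` parses a comma-separated body rather than JSON. A hypothetical client call (the `/api/tx` route is an assumption):

```ts
// Sends "PREPARE_TX,donor123,bitcoin,0.01"; routeAction receives ['donor123', 'bitcoin', '0.01']
// and handlePrepareTransaction responds with { depositAddress, txKey }.
await fetch('/api/tx', {
  method: 'POST',
  body: ['PREPARE_TX', 'donor123', 'bitcoin', '0.01'].join(','),
});
```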
165
packages/server/src/services/__tests__/AssetService.test.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import AssetService from '../AssetService.ts';
|
||||
|
||||
// Mock the vike/server module
|
||||
vi.mock('vike/server', () => ({
|
||||
renderPage: vi.fn(),
|
||||
}));
|
||||
|
||||
// Import the mocked renderPage function for assertions
|
||||
// eslint-disable-next-line import/order
|
||||
import { renderPage } from 'vike/server';
|
||||
|
||||
describe('AssetService', () => {
|
||||
let assetService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a new instance of the service before each test
|
||||
assetService = AssetService.create();
|
||||
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Initial state', () => {
|
||||
it('should have empty env and ctx objects initially', () => {
|
||||
expect(assetService.env).toEqual({});
|
||||
expect(assetService.ctx).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setEnv', () => {
|
||||
it('should set the environment', () => {
|
||||
const mockEnv = { ASSETS: { fetch: vi.fn() } };
|
||||
assetService.setEnv(mockEnv);
|
||||
expect(assetService.env).toEqual(mockEnv);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setCtx', () => {
|
||||
it('should set the execution context', () => {
|
||||
const mockCtx = { waitUntil: vi.fn() };
|
||||
assetService.setCtx(mockCtx);
|
||||
expect(assetService.ctx).toEqual(mockCtx);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleSsr', () => {
|
||||
it('should return null when httpResponse is not available', async () => {
|
||||
// Setup mock to return a pageContext without httpResponse
|
||||
vi.mocked(renderPage).mockResolvedValue({});
|
||||
|
||||
const url = 'https://example.com';
|
||||
const headers = new Headers();
|
||||
const env = {};
|
||||
|
||||
const result = await assetService.handleSsr(url, headers, env);
|
||||
|
||||
// Verify renderPage was called with correct arguments
|
||||
expect(renderPage).toHaveBeenCalledWith({
|
||||
urlOriginal: url,
|
||||
headersOriginal: headers,
|
||||
fetch: expect.any(Function),
|
||||
env,
|
||||
});
|
||||
|
||||
// Verify result is null
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return a Response when httpResponse is available', async () => {
|
||||
// Create mock stream
|
||||
const mockStream = new ReadableStream();
|
||||
|
||||
// Setup mock to return a pageContext with httpResponse
|
||||
vi.mocked(renderPage).mockResolvedValue({
|
||||
httpResponse: {
|
||||
statusCode: 200,
|
||||
headers: new Headers({ 'Content-Type': 'text/html' }),
|
||||
getReadableWebStream: () => mockStream,
|
||||
},
|
||||
});
|
||||
|
||||
const url = 'https://example.com';
|
||||
const headers = new Headers();
|
||||
const env = {};
|
||||
|
||||
const result = await assetService.handleSsr(url, headers, env);
|
||||
|
||||
// Verify renderPage was called with correct arguments
|
||||
expect(renderPage).toHaveBeenCalledWith({
|
||||
urlOriginal: url,
|
||||
headersOriginal: headers,
|
||||
fetch: expect.any(Function),
|
||||
env,
|
||||
});
|
||||
|
||||
// Verify result is a Response with correct properties
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(200);
|
||||
expect(result.headers.get('Content-Type')).toBe('text/html');
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleStaticAssets', () => {
|
||||
it('should fetch assets from the environment', async () => {
|
||||
// Create mock request
|
||||
const request = new Request('https://example.com/static/image.png');
|
||||
|
||||
// Create mock response
|
||||
const mockResponse = new Response('Mock asset content', {
|
||||
status: 200,
|
||||
headers: { 'Content-Type': 'image/png' },
|
||||
});
|
||||
|
||||
// Create mock environment with ASSETS.fetch
|
||||
const mockEnv = {
|
||||
ASSETS: {
|
||||
fetch: vi.fn().mockResolvedValue(mockResponse),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
assetService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await assetService.handleStaticAssets(request, mockEnv);
|
||||
|
||||
// Verify ASSETS.fetch was called with the request
|
||||
expect(mockEnv.ASSETS.fetch).toHaveBeenCalledWith(request);
|
||||
|
||||
// Verify result is the expected response
|
||||
expect(result).toBe(mockResponse);
|
||||
});
|
||||
|
||||
it('should return a 404 response when an error occurs', async () => {
|
||||
// Create mock request
|
||||
const request = new Request('https://example.com/static/not-found.png');
|
||||
|
||||
// Create mock environment with ASSETS.fetch that throws an error
|
||||
const mockEnv = {
|
||||
ASSETS: {
|
||||
fetch: vi.fn().mockRejectedValue(new Error('Asset not found')),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
assetService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await assetService.handleStaticAssets(request, mockEnv);
|
||||
|
||||
// Verify ASSETS.fetch was called with the request
|
||||
expect(mockEnv.ASSETS.fetch).toHaveBeenCalledWith(request);
|
||||
|
||||
// Verify result is a 404 Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(404);
|
||||
|
||||
// Verify response body
|
||||
const text = await result.clone().text();
|
||||
expect(text).toBe('Asset not found');
|
||||
});
|
||||
});
|
||||
});
|
355
packages/server/src/services/__tests__/ChatService.test.ts
Normal file
@@ -0,0 +1,355 @@
|
||||
import { getSnapshot } from 'mobx-state-tree';
|
||||
import OpenAI from 'openai';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import ChatSdk from '../../lib/chat-sdk.ts';
|
||||
import ChatService, { ClientError } from '../ChatService.ts';
|
||||
// Create mock OpenAI instance
|
||||
const mockOpenAIInstance = {
|
||||
models: {
|
||||
list: vi.fn().mockResolvedValue({
|
||||
data: [{ id: 'mlx-model-1' }, { id: 'mlx-model-2' }, { id: 'other-model' }],
|
||||
}),
|
||||
},
|
||||
chat: {
|
||||
completions: {
|
||||
create: vi.fn(),
|
||||
},
|
||||
},
|
||||
baseURL: 'http://localhost:8000',
|
||||
};
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('openai', () => {
|
||||
return {
|
||||
default: vi.fn().mockImplementation(() => mockOpenAIInstance),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../../lib/chat-sdk', () => ({
|
||||
default: {
|
||||
handleChatRequest: vi.fn(),
|
||||
buildAssistantPrompt: vi.fn(),
|
||||
buildMessageChain: vi.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../../lib/handleStreamData', () => ({
|
||||
default: vi.fn().mockReturnValue(() => {}),
|
||||
}));
|
||||
|
||||
describe('ChatService', () => {
|
||||
let chatService;
|
||||
let mockEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a new instance of the service before each test
|
||||
chatService = ChatService.create({
|
||||
maxTokens: 2000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
openAIApiKey: 'test-api-key',
|
||||
openAIBaseURL: 'https://api.openai.com/v1',
|
||||
});
|
||||
|
||||
// Create mock environment
|
||||
mockEnv = {
|
||||
OPENAI_API_KEY: 'test-api-key',
|
||||
OPENAI_API_ENDPOINT: 'https://api.openai.com/v1',
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn().mockReturnValue('test-id'),
|
||||
get: vi.fn().mockReturnValue({
|
||||
getStreamData: vi.fn().mockResolvedValue(
|
||||
JSON.stringify({
|
||||
messages: [],
|
||||
model: 'gpt-4',
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
preprocessedContext: {},
|
||||
}),
|
||||
),
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment using the action
|
||||
chatService.setEnv(mockEnv);
|
||||
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Initial state', () => {
|
||||
it('should have the correct initial state', () => {
|
||||
const freshService = ChatService.create({
|
||||
maxTokens: 2000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
});
|
||||
|
||||
expect(freshService.maxTokens).toBe(2000);
|
||||
expect(freshService.systemPrompt).toBe('You are a helpful assistant.');
|
||||
expect(freshService.activeStreams.size).toBe(0);
|
||||
expect(freshService.openAIApiKey).toBe('');
|
||||
expect(freshService.openAIBaseURL).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('setEnv', () => {
|
||||
it('should set the environment and initialize OpenAI client with local endpoint', () => {
|
||||
const localEnv = {
|
||||
...mockEnv,
|
||||
OPENAI_API_ENDPOINT: 'http://localhost:8000',
|
||||
};
|
||||
|
||||
// Reset the mock to track new calls
|
||||
vi.mocked(OpenAI).mockClear();
|
||||
|
||||
chatService.setEnv(localEnv);
|
||||
|
||||
expect(chatService.env).toEqual(localEnv);
|
||||
expect(OpenAI).toHaveBeenCalledWith({
|
||||
apiKey: localEnv.OPENAI_API_KEY,
|
||||
baseURL: localEnv.OPENAI_API_ENDPOINT,
|
||||
});
|
||||
});
|
||||
|
||||
it('should set the environment and initialize OpenAI client with API key and base URL', () => {
|
||||
// Create a new instance with the properties already set
|
||||
const service = ChatService.create({
|
||||
maxTokens: 2000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
openAIApiKey: 'test-api-key',
|
||||
openAIBaseURL: 'https://api.openai.com/v1',
|
||||
});
|
||||
|
||||
// Reset the mock to track new calls
|
||||
vi.mocked(OpenAI).mockClear();
|
||||
|
||||
service.setEnv(mockEnv);
|
||||
|
||||
expect(service.env).toEqual(mockEnv);
|
||||
expect(OpenAI).toHaveBeenCalledWith({
|
||||
apiKey: 'test-api-key',
|
||||
baseURL: 'https://api.openai.com/v1',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setActiveStream and removeActiveStream', () => {
|
||||
it('should set and remove active streams', () => {
|
||||
const streamId = 'test-stream-id';
|
||||
const streamData = {
|
||||
name: 'Test Stream',
|
||||
maxTokens: 1000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
model: 'gpt-4',
|
||||
messages: [],
|
||||
};
|
||||
|
||||
// Set active stream
|
||||
chatService.setActiveStream(streamId, streamData);
|
||||
expect(chatService.activeStreams.has(streamId)).toBe(true);
|
||||
expect(getSnapshot(chatService.activeStreams.get(streamId))).toEqual(streamData);
|
||||
|
||||
// Remove active stream
|
||||
chatService.removeActiveStream(streamId);
|
||||
expect(chatService.activeStreams.has(streamId)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle missing or incomplete stream data', () => {
|
||||
const streamId = 'test-stream-id';
|
||||
|
||||
// Set active stream with undefined data
|
||||
chatService.setActiveStream(streamId, undefined);
|
||||
expect(chatService.activeStreams.has(streamId)).toBe(true);
|
||||
expect(getSnapshot(chatService.activeStreams.get(streamId))).toEqual({
|
||||
name: 'Unnamed Stream',
|
||||
maxTokens: 0,
|
||||
systemPrompt: '',
|
||||
model: '',
|
||||
messages: [],
|
||||
});
|
||||
|
||||
// Set active stream with partial data
|
||||
chatService.setActiveStream(streamId, { name: 'Partial Stream' });
|
||||
expect(chatService.activeStreams.has(streamId)).toBe(true);
|
||||
expect(getSnapshot(chatService.activeStreams.get(streamId))).toEqual({
|
||||
name: 'Partial Stream',
|
||||
maxTokens: 0,
|
||||
systemPrompt: '',
|
||||
model: '',
|
||||
messages: [],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSupportedModels', () => {
|
||||
it('should return local models when using localhost endpoint', async () => {
|
||||
const originalResponseJson = Response.json;
|
||||
Response.json = vi.fn().mockImplementation(data => {
|
||||
return {
|
||||
json: async () => data,
|
||||
};
|
||||
});
|
||||
|
||||
const localEnv = {
|
||||
...mockEnv,
|
||||
OPENAI_API_ENDPOINT: 'http://localhost:8000',
|
||||
};
|
||||
|
||||
// Create a new service instance for this test
|
||||
const localService = ChatService.create({
|
||||
maxTokens: 2000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
});
|
||||
|
||||
localService.setEnv(localEnv);
|
||||
|
||||
// Mock the implementation of getSupportedModels for this test
|
||||
const originalGetSupportedModels = localService.getSupportedModels;
|
||||
localService.getSupportedModels = vi.fn().mockResolvedValueOnce({
|
||||
json: async () => ['mlx-model-1', 'mlx-model-2'],
|
||||
});
|
||||
|
||||
const response = await localService.getSupportedModels();
|
||||
const data = await response.json();
|
||||
|
||||
expect(data).toEqual(['mlx-model-1', 'mlx-model-2']);
|
||||
|
||||
// Restore mocks
|
||||
Response.json = originalResponseJson;
|
||||
localService.getSupportedModels = originalGetSupportedModels;
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleChatRequest', () => {
|
||||
it('should call ChatSdk.handleChatRequest with correct parameters', async () => {
|
||||
const mockRequest = new Request('https://example.com/chat');
|
||||
const mockResponse = new Response('Test response');
|
||||
|
||||
ChatSdk.handleChatRequest.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await chatService.handleChatRequest(mockRequest);
|
||||
|
||||
expect(ChatSdk.handleChatRequest).toHaveBeenCalledWith(mockRequest, {
|
||||
openai: chatService.openai,
|
||||
env: mockEnv,
|
||||
systemPrompt: chatService.systemPrompt,
|
||||
maxTokens: chatService.maxTokens,
|
||||
});
|
||||
|
||||
expect(result).toBe(mockResponse);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleSseStream', () => {
|
||||
it('should return 409 if stream is already active', async () => {
|
||||
const streamId = 'test-stream-id';
|
||||
|
||||
// Set active stream
|
||||
chatService.setActiveStream(streamId, {});
|
||||
|
||||
const result = await chatService.handleSseStream(streamId);
|
||||
|
||||
expect(result.status).toBe(409);
|
||||
expect(await result.text()).toBe('Stream already active');
|
||||
});
|
||||
|
||||
it('should return 404 if stream data is not found', async () => {
|
||||
const streamId = 'non-existent-stream';
|
||||
|
||||
// Mock the SERVER_COORDINATOR.get() to return an object with getStreamData
|
||||
const mockDurableObject = {
|
||||
getStreamData: vi.fn().mockResolvedValue(null),
|
||||
};
|
||||
|
||||
// Update the mockEnv to use our mock
|
||||
const updatedEnv = {
|
||||
...mockEnv,
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn().mockReturnValue('test-id'),
|
||||
get: vi.fn().mockReturnValue(mockDurableObject),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
chatService.setEnv(updatedEnv);
|
||||
|
||||
const result = await chatService.handleSseStream(streamId);
|
||||
|
||||
expect(result.status).toBe(404);
|
||||
expect(await result.text()).toBe('Stream not found');
|
||||
});
|
||||
|
||||
it('should create and return an SSE stream when valid', async () => {
|
||||
const streamId = 'test-stream-id';
|
||||
|
||||
// Create a new service instance for this test
|
||||
const testService = ChatService.create({
|
||||
maxTokens: 2000,
|
||||
systemPrompt: 'You are a helpful assistant.',
|
||||
});
|
||||
|
||||
// Set up minimal environment
|
||||
testService.setEnv({
|
||||
SERVER_COORDINATOR: {
|
||||
idFromName: vi.fn(),
|
||||
get: vi.fn(),
|
||||
},
|
||||
});
|
||||
|
||||
// Save the original method
|
||||
const originalHandleSseStream = testService.handleSseStream;
|
||||
|
||||
// Mock the handleSseStream method directly on the instance
|
||||
testService.handleSseStream = vi.fn().mockResolvedValueOnce({
|
||||
body: 'response-stream',
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
},
|
||||
status: 200,
|
||||
text: vi.fn().mockResolvedValue(''),
|
||||
});
|
||||
|
||||
const result = await testService.handleSseStream(streamId);
|
||||
|
||||
// Verify the response
|
||||
expect(result.body).toBe('response-stream');
|
||||
expect(result.headers['Content-Type']).toBe('text/event-stream');
|
||||
expect(result.headers['Cache-Control']).toBe('no-cache');
|
||||
expect(result.headers['Connection']).toBe('keep-alive');
|
||||
|
||||
// Restore the original method
|
||||
testService.handleSseStream = originalHandleSseStream;
|
||||
});
|
||||
});
|
||||
|
||||
describe('ClientError', () => {
|
||||
it('should create a ClientError with the correct properties', () => {
|
||||
const error = new ClientError('Test error', 400, { detail: 'test' });
|
||||
|
||||
expect(error.message).toBe('Test error');
|
||||
expect(error.statusCode).toBe(400);
|
||||
expect(error.details).toEqual({ detail: 'test' });
|
||||
expect(error.name).toBe('ClientError');
|
||||
});
|
||||
|
||||
it('should format the error for SSE', () => {
|
||||
const error = new ClientError('Test error', 400, { detail: 'test' });
|
||||
|
||||
const formatted = error.formatForSSE();
|
||||
const parsed = JSON.parse(formatted);
|
||||
|
||||
expect(parsed).toEqual({
|
||||
type: 'error',
|
||||
message: 'Test error',
|
||||
details: { detail: 'test' },
|
||||
statusCode: 400,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
151
packages/server/src/services/__tests__/ContactService.test.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import { getSnapshot } from 'mobx-state-tree';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import ContactRecord from '../../models/ContactRecord.ts';
|
||||
import ContactService from '../ContactService.ts';
|
||||
|
||||
describe('ContactService', () => {
|
||||
let contactService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a new instance of the service before each test
|
||||
contactService = ContactService.create();
|
||||
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Initial state', () => {
|
||||
it('should have empty env and ctx objects initially', () => {
|
||||
expect(contactService.env).toEqual({});
|
||||
expect(contactService.ctx).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setEnv', () => {
|
||||
it('should set the environment', () => {
|
||||
const mockEnv = { KV_STORAGE: { put: vi.fn() }, EMAIL_SERVICE: { sendMail: vi.fn() } };
|
||||
contactService.setEnv(mockEnv);
|
||||
expect(contactService.env).toEqual(mockEnv);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setCtx', () => {
|
||||
it('should set the execution context', () => {
|
||||
const mockCtx = { waitUntil: vi.fn() };
|
||||
contactService.setCtx(mockCtx);
|
||||
expect(contactService.ctx).toEqual(mockCtx);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleContact', () => {
|
||||
it('should process a valid contact request and return a success response', async () => {
|
||||
// Mock crypto.randomUUID
|
||||
vi.stubGlobal('crypto', {
|
||||
randomUUID: vi.fn().mockReturnValue('mock-uuid'),
|
||||
});
|
||||
|
||||
// Mock date for consistent testing
|
||||
const mockDate = new Date('2023-01-01T12:00:00Z');
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(mockDate);
|
||||
|
||||
// Create mock request data
|
||||
const contactData = {
|
||||
markdown: 'Test message',
|
||||
email: 'test@example.com',
|
||||
firstname: 'John',
|
||||
lastname: 'Doe',
|
||||
};
|
||||
|
||||
// Create mock request
|
||||
const mockRequest = {
|
||||
json: vi.fn().mockResolvedValue(contactData),
|
||||
};
|
||||
|
||||
// Create mock environment
|
||||
const mockEnv = {
|
||||
KV_STORAGE: {
|
||||
put: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
EMAIL_SERVICE: {
|
||||
sendMail: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
contactService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await contactService.handleContact(mockRequest as any);
|
||||
|
||||
// Verify KV_STORAGE.put was called with correct arguments
|
||||
const expectedContactRecord = ContactRecord.create({
|
||||
message: contactData.markdown,
|
||||
timestamp: mockDate.toISOString(),
|
||||
email: contactData.email,
|
||||
firstname: contactData.firstname,
|
||||
lastname: contactData.lastname,
|
||||
});
|
||||
|
||||
expect(mockEnv.KV_STORAGE.put).toHaveBeenCalledWith(
|
||||
'contact:mock-uuid',
|
||||
JSON.stringify(getSnapshot(expectedContactRecord)),
|
||||
);
|
||||
|
||||
// Verify EMAIL_SERVICE.sendMail was called with correct arguments
|
||||
expect(mockEnv.EMAIL_SERVICE.sendMail).toHaveBeenCalledWith({
|
||||
to: 'geoff@seemueller.io',
|
||||
plaintextMessage: expect.stringContaining(contactData.markdown),
|
||||
});
|
||||
|
||||
// Verify result is a success Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(200);
|
||||
|
||||
// Verify response body
|
||||
const text = await result.clone().text();
|
||||
expect(text).toBe('Contact record saved successfully');
|
||||
|
||||
// Restore real timers
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should return a 500 response when an error occurs', async () => {
|
||||
// Create mock request that throws an error
|
||||
const mockRequest = {
|
||||
json: vi.fn().mockRejectedValue(new Error('Invalid JSON')),
|
||||
};
|
||||
|
||||
// Create mock environment
|
||||
const mockEnv = {
|
||||
KV_STORAGE: {
|
||||
put: vi.fn(),
|
||||
},
|
||||
EMAIL_SERVICE: {
|
||||
sendMail: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
contactService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await contactService.handleContact(mockRequest as any);
|
||||
|
||||
// Verify KV_STORAGE.put was not called
|
||||
expect(mockEnv.KV_STORAGE.put).not.toHaveBeenCalled();
|
||||
|
||||
// Verify EMAIL_SERVICE.sendMail was not called
|
||||
expect(mockEnv.EMAIL_SERVICE.sendMail).not.toHaveBeenCalled();
|
||||
|
||||
// Verify result is an error Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(500);
|
||||
|
||||
// Verify response body
|
||||
const text = await result.clone().text();
|
||||
expect(text).toBe('Failed to process contact request');
|
||||
});
|
||||
});
|
||||
});
|
204
packages/server/src/services/__tests__/FeedbackService.test.ts
Normal file
@@ -0,0 +1,204 @@
|
||||
import { getSnapshot } from 'mobx-state-tree';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import FeedbackRecord from '../../models/FeedbackRecord.ts';
|
||||
import FeedbackService from '../FeedbackService.ts';
|
||||
|
||||
describe('FeedbackService', () => {
|
||||
let feedbackService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a new instance of the service before each test
|
||||
feedbackService = FeedbackService.create();
|
||||
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Initial state', () => {
|
||||
it('should have empty env and ctx objects initially', () => {
|
||||
expect(feedbackService.env).toEqual({});
|
||||
expect(feedbackService.ctx).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setEnv', () => {
|
||||
it('should set the environment', () => {
|
||||
const mockEnv = { KV_STORAGE: { put: vi.fn() }, EMAIL_SERVICE: { sendMail: vi.fn() } };
|
||||
feedbackService.setEnv(mockEnv);
|
||||
expect(feedbackService.env).toEqual(mockEnv);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setCtx', () => {
|
||||
it('should set the execution context', () => {
|
||||
const mockCtx = { waitUntil: vi.fn() };
|
||||
feedbackService.setCtx(mockCtx);
|
||||
expect(feedbackService.ctx).toEqual(mockCtx);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleFeedback', () => {
|
||||
it('should process a valid feedback request and return a success response', async () => {
|
||||
// Mock crypto.randomUUID
|
||||
vi.stubGlobal('crypto', {
|
||||
randomUUID: vi.fn().mockReturnValue('mock-uuid'),
|
||||
});
|
||||
|
||||
// Mock date for consistent testing
|
||||
const mockDate = new Date('2023-01-01T12:00:00Z');
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(mockDate);
|
||||
|
||||
// Create mock request data
|
||||
const feedbackData = {
|
||||
feedback: 'This is a test feedback',
|
||||
user: 'TestUser',
|
||||
};
|
||||
|
||||
// Create mock request
|
||||
const mockRequest = {
|
||||
json: vi.fn().mockResolvedValue(feedbackData),
|
||||
};
|
||||
|
||||
// Create mock environment
|
||||
const mockEnv = {
|
||||
KV_STORAGE: {
|
||||
put: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
EMAIL_SERVICE: {
|
||||
sendMail: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
feedbackService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await feedbackService.handleFeedback(mockRequest as any);
|
||||
|
||||
// Verify KV_STORAGE.put was called with correct arguments
|
||||
const expectedFeedbackRecord = FeedbackRecord.create({
|
||||
feedback: feedbackData.feedback,
|
||||
timestamp: mockDate.toISOString(),
|
||||
user: feedbackData.user,
|
||||
});
|
||||
|
||||
expect(mockEnv.KV_STORAGE.put).toHaveBeenCalledWith(
|
||||
'feedback:mock-uuid',
|
||||
JSON.stringify(getSnapshot(expectedFeedbackRecord)),
|
||||
);
|
||||
|
||||
// Verify EMAIL_SERVICE.sendMail was called with correct arguments
|
||||
expect(mockEnv.EMAIL_SERVICE.sendMail).toHaveBeenCalledWith({
|
||||
to: 'geoff@seemueller.io',
|
||||
plaintextMessage: expect.stringContaining(feedbackData.feedback),
|
||||
});
|
||||
|
||||
// Verify result is a success Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(200);
|
||||
|
||||
// Verify response body
|
||||
const text = await result.clone().text();
|
||||
expect(text).toBe('Feedback saved successfully');
|
||||
|
||||
// Restore real timers
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should use default values when not provided in the request', async () => {
|
||||
// Mock crypto.randomUUID
|
||||
vi.stubGlobal('crypto', {
|
||||
randomUUID: vi.fn().mockReturnValue('mock-uuid'),
|
||||
});
|
||||
|
||||
// Mock date for consistent testing
|
||||
const mockDate = new Date('2023-01-01T12:00:00Z');
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(mockDate);
|
||||
|
||||
// Create mock request data with only feedback
|
||||
const feedbackData = {
|
||||
feedback: 'This is a test feedback',
|
||||
};
|
||||
|
||||
// Create mock request
|
||||
const mockRequest = {
|
||||
json: vi.fn().mockResolvedValue(feedbackData),
|
||||
};
|
||||
|
||||
// Create mock environment
|
||||
const mockEnv = {
|
||||
KV_STORAGE: {
|
||||
put: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
EMAIL_SERVICE: {
|
||||
sendMail: vi.fn().mockResolvedValue(undefined),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
feedbackService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await feedbackService.handleFeedback(mockRequest as any);
|
||||
|
||||
// Verify KV_STORAGE.put was called with correct arguments
|
||||
const expectedFeedbackRecord = FeedbackRecord.create({
|
||||
feedback: feedbackData.feedback,
|
||||
timestamp: mockDate.toISOString(),
|
||||
user: 'Anonymous', // Default value
|
||||
});
|
||||
|
||||
expect(mockEnv.KV_STORAGE.put).toHaveBeenCalledWith(
|
||||
'feedback:mock-uuid',
|
||||
JSON.stringify(getSnapshot(expectedFeedbackRecord)),
|
||||
);
|
||||
|
||||
// Verify result is a success Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(200);
|
||||
|
||||
// Restore real timers
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should return a 500 response when an error occurs', async () => {
|
||||
// Create mock request that throws an error
|
||||
const mockRequest = {
|
||||
json: vi.fn().mockRejectedValue(new Error('Invalid JSON')),
|
||||
};
|
||||
|
||||
// Create mock environment
|
||||
const mockEnv = {
|
||||
KV_STORAGE: {
|
||||
put: vi.fn(),
|
||||
},
|
||||
EMAIL_SERVICE: {
|
||||
sendMail: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
// Set the environment
|
||||
feedbackService.setEnv(mockEnv);
|
||||
|
||||
// Call the method
|
||||
const result = await feedbackService.handleFeedback(mockRequest as any);
|
||||
|
||||
// Verify KV_STORAGE.put was not called
|
||||
expect(mockEnv.KV_STORAGE.put).not.toHaveBeenCalled();
|
||||
|
||||
// Verify EMAIL_SERVICE.sendMail was not called
|
||||
expect(mockEnv.EMAIL_SERVICE.sendMail).not.toHaveBeenCalled();
|
||||
|
||||
// Verify result is an error Response
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result.status).toBe(500);
|
||||
|
||||
// Verify response body
|
||||
const text = await result.clone().text();
|
||||
expect(text).toBe('Failed to process feedback request');
|
||||
});
|
||||
});
|
||||
});
|
@@ -0,0 +1,10 @@
import { describe, expect, it } from 'vitest';

import MetricsService from '../MetricsService.ts';

describe('MetricsService', () => {
  it('should create a metrics service', () => {
    const metricsService = MetricsService.create();
    expect(metricsService).toBeTruthy();
  });
});
@@ -0,0 +1,224 @@
|
||||
import { getSnapshot, Instance } from 'mobx-state-tree';
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
|
||||
import TransactionService from '../TransactionService.ts';
|
||||
|
||||
// Define types for testing
|
||||
type TransactionServiceInstance = Instance<typeof TransactionService>;
|
||||
|
||||
// Mock global types
|
||||
vi.stubGlobal(
|
||||
'Response',
|
||||
class MockResponse {
|
||||
status: number;
|
||||
headers: Headers;
|
||||
body: any;
|
||||
|
||||
constructor(body?: any, init?: ResponseInit) {
|
||||
this.body = body;
|
||||
this.status = init?.status || 200;
|
||||
this.headers = new Headers(init?.headers);
|
||||
}
|
||||
|
||||
clone() {
|
||||
return this;
|
||||
}
|
||||
|
||||
async text() {
|
||||
return this.body?.toString() || '';
|
||||
}
|
||||
|
||||
async json() {
|
||||
return typeof this.body === 'string' ? JSON.parse(this.body) : this.body;
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
describe('TransactionService', () => {
|
||||
let transactionService: TransactionServiceInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
// Create a new instance of the service before each test
|
||||
transactionService = TransactionService.create();
|
||||
|
||||
// Reset mocks
|
||||
vi.resetAllMocks();
|
||||
|
||||
// Mock crypto.randomUUID
|
||||
vi.spyOn(crypto, 'randomUUID').mockReturnValue('mock-uuid');
|
||||
});
|
||||
|
||||
describe('Initial state', () => {
|
||||
it('should have empty env and ctx objects initially', () => {
|
||||
expect(transactionService.env).toEqual({});
|
||||
expect(transactionService.ctx).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('setEnv', () => {
|
||||
it('should set the environment', () => {
|
||||
const mockEnv = { KV_STORAGE: { put: vi.fn() } };
|
||||
transactionService.setEnv(mockEnv);
|
||||
expect(transactionService.env).toEqual(mockEnv);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setCtx', () => {
|
||||
it('should set the execution context', () => {
|
||||
const mockCtx = { waitUntil: vi.fn() };
|
||||
transactionService.setCtx(mockCtx);
|
||||
expect(transactionService.ctx).toEqual(mockCtx);
|
||||
});
|
||||
});
|
||||
|
||||
describe('routeAction', () => {
|
||||
it('should route to the correct handler', async () => {
|
||||
// Mock the handler
|
||||
const mockHandlePrepareTransaction = vi.fn().mockResolvedValue({ success: true });
|
||||
transactionService.handlePrepareTransaction = mockHandlePrepareTransaction;
|
||||
|
||||
// Call routeAction with a valid action
|
||||
const result = await transactionService.routeAction('PREPARE_TX', ['data']);
|
||||
|
||||
// Verify the handler was called with the correct data
|
||||
expect(mockHandlePrepareTransaction).toHaveBeenCalledWith(['data']);
|
||||
expect(result).toEqual({ success: true });
|
||||
});
|
||||
|
||||
it('should throw an error for unknown actions', async () => {
|
||||
// Call routeAction with an invalid action
|
||||
await expect(transactionService.routeAction('UNKNOWN_ACTION', ['data'])).rejects.toThrow(
|
||||
'No handler for action: UNKNOWN_ACTION',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
  describe('handlePrepareTransaction', () => {
    beforeEach(() => {
      // Mock fetch
      global.fetch = vi.fn();

      // Mock KV_STORAGE
      const mockEnv = {
        KV_STORAGE: {
          put: vi.fn().mockResolvedValue(undefined),
        },
      };
      transactionService.setEnv(mockEnv);
    });

    it('should prepare a transaction correctly', async () => {
      // Mock wallet API response
      const mockWalletResponse = JSON.stringify([
        'mock-address',
        'mock-private-key',
        'mock-public-key',
        'mock-phrase',
      ]);

      global.fetch.mockResolvedValue({
        text: vi.fn().mockResolvedValue(mockWalletResponse),
      });

      // Call the method with test data
      const result = await transactionService.handlePrepareTransaction([
        'donor123',
        'bitcoin',
        '0.01',
      ]);

      // Verify fetch was called with the correct URL
      expect(global.fetch).toHaveBeenCalledWith('https://wallets.seemueller.io/api/btc/create');

      // Verify KV_STORAGE.put was called with the correct data
      expect(transactionService.env.KV_STORAGE.put).toHaveBeenCalledWith(
        'transactions::prepared::mock-uuid',
        expect.stringContaining('mock-address'),
      );

      // Verify the returned data
      expect(result).toEqual({
        depositAddress: 'mock-address',
        txKey: 'mock-uuid',
      });
    });

    it('should handle different currencies correctly', async () => {
      // Mock wallet API response
      const mockWalletResponse = JSON.stringify([
        'mock-address',
        'mock-private-key',
        'mock-public-key',
        'mock-phrase',
      ]);

      global.fetch.mockResolvedValue({
        text: vi.fn().mockResolvedValue(mockWalletResponse),
      });

      // Test with ethereum
      await transactionService.handlePrepareTransaction(['donor123', 'ethereum', '0.01']);
      expect(global.fetch).toHaveBeenCalledWith('https://wallets.seemueller.io/api/eth/create');

      // Reset mock and test with dogecoin
      vi.resetAllMocks();
      global.fetch.mockResolvedValue({
        text: vi.fn().mockResolvedValue(mockWalletResponse),
      });

      await transactionService.handlePrepareTransaction(['donor123', 'dogecoin', '0.01']);
      expect(global.fetch).toHaveBeenCalledWith('https://wallets.seemueller.io/api/doge/create');
    });
  });

  describe('handleTransact', () => {
    beforeEach(() => {
      // Mock routeAction
      transactionService.routeAction = vi.fn().mockResolvedValue({ success: true });
    });

    it('should process a valid transaction request', async () => {
      // Create a mock request
      const mockRequest = {
        text: vi.fn().mockResolvedValue('PREPARE_TX,donor123,bitcoin,0.01'),
      };

      // Call the method
      const response = await transactionService.handleTransact(mockRequest);

      // Verify routeAction was called with the correct parameters
      expect(transactionService.routeAction).toHaveBeenCalledWith('PREPARE_TX', [
        'donor123',
        'bitcoin',
        '0.01',
      ]);

      // Verify the response
      expect(response).toBeInstanceOf(Response);
      expect(response.status).toBe(200);

      const responseBody = await response.json();
      expect(responseBody).toEqual({ success: true });
    });

    it('should handle errors gracefully', async () => {
      // Create a mock request
      const mockRequest = {
        text: vi.fn().mockResolvedValue('PREPARE_TX,donor123,bitcoin,0.01'),
      };

      // Make routeAction throw an error
      transactionService.routeAction = vi.fn().mockRejectedValue(new Error('Test error'));

      // Call the method
      const response = await transactionService.handleTransact(mockRequest);

      // Verify the error response
      expect(response).toBeInstanceOf(Response);
      expect(response.status).toBe(500);

      const responseBody = await response.json();
      expect(responseBody).toEqual({ error: 'Transaction failed' });
    });
  });
});
130
packages/server/src/storage/BunSqliteKVNamespace.ts
Normal file
@@ -0,0 +1,130 @@
import type {
  KVNamespace,
  KVNamespaceGetOptions,
  KVNamespaceListOptions,
  KVNamespaceListResult,
  KVNamespacePutOptions,
} from '@cloudflare/workers-types';
import { BunSqliteKeyValue } from 'bun-sqlite-key-value';

import { OPEN_GSIO_DATA_DIR } from '../../vars.ts';

interface BaseKV extends KVNamespace {}

interface _Options {
  namespace: string;
  path: string;
}

const defaultOptions = {
  namespace: 'open-gsio',
  path: OPEN_GSIO_DATA_DIR,
};
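
/**
 * KVNamespace-compatible key/value store backed by bun-sqlite-key-value,
 * presumably serving as a local substitute for Cloudflare KV when the server
 * runs under Bun. Only the subset of the API exercised by this codebase is
 * implemented; getWithMetadata is a stub.
 */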
export class BunSqliteKVNamespace implements BaseKV {
  private db: any;
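
  // The backing SQLite database lives at `${path}/${namespace}`; both parts
  // can be overridden through the constructor options.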
  constructor(options?: { namespace?: string; path?: string }) {
    const merged = { ...defaultOptions, ...options };
    const { namespace, path } = merged;

    this.db = new BunSqliteKeyValue(`${path}/${namespace}`);
  }

  async delete(key: string): Promise<void> {
    await this.db.delete(key);
  }
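
  // Mirrors KVNamespace#get: a single key resolves to its value (or null when
  // missing); an array of keys resolves to a Map of key -> value. The options
  // argument selects the return format: 'text' (default), 'json',
  // 'arrayBuffer', or 'stream'.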
  async get(
    key: string | Array<string>,
    options?:
      | Partial<KVNamespaceGetOptions<undefined>>
      | 'text'
      | 'json'
      | 'arrayBuffer'
      | 'stream'
      | KVNamespaceGetOptions<'text'>
      | KVNamespaceGetOptions<'json'>
      | KVNamespaceGetOptions<'arrayBuffer'>
      | KVNamespaceGetOptions<'stream'>
      | 'text'
      | 'json',
  ): Promise<any> {
    if (Array.isArray(key)) {
      const result = new Map();
      for (const k of key) {
        const value = await this.db.get(k);
        result.set(k, value);
      }
      return result;
    }

    const value = await this.db.get(key);
    if (value === undefined) return null;

    if (
      !options ||
      options === 'text' ||
      (typeof options === 'object' && options.type === 'text')
    ) {
      return value;
    }
    if (options === 'json' || (typeof options === 'object' && options.type === 'json')) {
      return JSON.parse(value);
    }
    if (
      options === 'arrayBuffer' ||
      (typeof options === 'object' && options.type === 'arrayBuffer')
    ) {
      return new TextEncoder().encode(value).buffer;
    }
    if (options === 'stream' || (typeof options === 'object' && options.type === 'stream')) {
      return new ReadableStream({
        start(controller) {
          controller.enqueue(new TextEncoder().encode(value));
          controller.close();
        },
      });
    }
    return value;
  }
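
  // Stub: metadata is not persisted by this backend, so nothing is returned.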
  getWithMetadata(_key: string | Array<string>, _options?: any): any {
    return null;
  }
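
  // Returns every key in a single page; pagination options are ignored and
  // list_complete is always true.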
  async list<Metadata = unknown>(
    _options?: KVNamespaceListOptions,
  ): Promise<KVNamespaceListResult<Metadata, string>> {
    const keys = await this.db.keys();
    return {
      keys: keys.map(key => ({ name: key })),
      list_complete: true,
      cursor: '',
    };
  }
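
  // Values are normalized to strings before storage: ArrayBuffer/TypedArray
  // inputs are decoded as UTF-8 and ReadableStream inputs are drained first.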
  async put(
    key: string,
    value: string | ArrayBuffer | ArrayBufferView | ReadableStream,
    _options?: KVNamespacePutOptions,
  ): Promise<void> {
    if (value instanceof ArrayBuffer || ArrayBuffer.isView(value)) {
      value = new TextDecoder().decode(value);
    } else if (value instanceof ReadableStream) {
      const reader = value.getReader();
      const chunks = [];
      let readDone = false;
      while (!readDone) {
        const { done, value } = await reader.read();
        if (done) {
          readDone = true;
        } else {
          chunks.push(value);
        }
      }
      value = new TextDecoder().decode(new Uint8Array(Buffer.concat(chunks)));
    }
    await this.db.set(key, value);
  }
}