mirror of
https://github.com/geoffsee/open-gsio.git
synced 2025-09-08 22:56:46 +00:00

- Moved `providers`, `services`, `models`, `lib`, and related files to `src` directory within `server` package. - Adjusted imports across the codebase to reflect the new paths. - Renamed several `.ts` files for consistency. - Introduced an `index.ts` in the `ai/providers` package to export all providers. This improves maintainability and aligns with the project's updated directory structure.
45 lines
1.6 KiB
TypeScript
import ChatSdk from '@open-gsio/server/src/lib/chat-sdk.ts';
|
|
import { OpenAI } from 'openai';
|
|
|
|
export interface CommonProviderParams {
|
|
openai?: OpenAI; // Optional for providers that use a custom client.
|
|
systemPrompt: any;
|
|
preprocessedContext: any;
|
|
maxTokens: number | unknown | undefined;
|
|
messages: any;
|
|
model: string;
|
|
env: Env;
|
|
disableWebhookGeneration?: boolean;
|
|
// Additional fields can be added as needed
|
|
}
|
|
|
|
/**
 * Contract implemented by every chat provider capable of streaming
 * completion data back to the caller.
 *
 * `handleStream` receives the common provider parameters and a callback
 * invoked with each piece of streamed data; it resolves when the stream
 * has been fully consumed (or an implementation chooses to stop early).
 */
export interface ChatStreamProvider {
  handleStream(param: CommonProviderParams, dataCallback: (data: any) => void): Promise<any>;
}
|
|
|
|
export abstract class BaseChatProvider implements ChatStreamProvider {
|
|
abstract getOpenAIClient(param: CommonProviderParams): OpenAI;
|
|
abstract getStreamParams(param: CommonProviderParams, safeMessages: any[]): any;
|
|
abstract async processChunk(chunk: any, dataCallback: (data: any) => void): Promise<boolean>;
|
|
|
|
async handleStream(param: CommonProviderParams, dataCallback: (data: any) => void) {
|
|
const assistantPrompt = ChatSdk.buildAssistantPrompt({ maxTokens: param.maxTokens });
|
|
const safeMessages = await ChatSdk.buildMessageChain(param.messages, {
|
|
systemPrompt: param.systemPrompt,
|
|
model: param.model,
|
|
assistantPrompt,
|
|
toolResults: param.preprocessedContext,
|
|
env: param.env,
|
|
});
|
|
|
|
const client = this.getOpenAIClient(param);
|
|
const streamParams = this.getStreamParams(param, safeMessages);
|
|
const stream = await client.chat.completions.create(streamParams);
|
|
|
|
for await (const chunk of stream) {
|
|
const shouldBreak = await this.processChunk(chunk, dataCallback);
|
|
if (shouldBreak) break;
|
|
}
|
|
}
|
|
}
|