mirror of https://github.com/geoffsee/open-gsio.git (synced 2025-09-08 22:56:46 +00:00)
- Add killport.js script for terminating processes on specific ports
- Introduce `supportedModels` in `ClientChatStore` and update model validation logic
- Enhance OpenAI inferencing with local setup adaptations and improved streaming options
- Modify ChatService to handle local and remote model fetching
- Update input menu to dynamically fetch and display supported models
- Add start_inference_server.sh for initiating local inference server
- Upgrade OpenAI SDK to v5.0.1 and adjust dependencies accordingly
committed by Geoff Seemueller
parent c9ee7c7690
commit cc0da17b5f
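The diff below adds a small message normalizer used when chatting against a local model. As context for the "local and remote model fetching" item in the commit message, here is a minimal sketch of how a service might list available models from either a local OpenAI-compatible server or the hosted API with the v5 SDK; LOCAL_INFERENCE_URL and fetchSupportedModels are illustrative names, not the repository's actual code.

import OpenAI from "openai";

// Illustrative only: prefer a local OpenAI-compatible endpoint when configured,
// otherwise fall back to the hosted OpenAI API.
const LOCAL_INFERENCE_URL = process.env.LOCAL_INFERENCE_URL; // e.g. "http://localhost:8000/v1"

async function fetchSupportedModels(): Promise<string[]> {
  const client = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY ?? "sk-local", // local servers typically ignore the key
    baseURL: LOCAL_INFERENCE_URL,                     // undefined -> default hosted endpoint
  });
  const page = await client.models.list();
  return page.data.map((model) => model.id);
}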
@@ -59,4 +59,38 @@ export class Utils {
    return result;
  }

  static normalizeWithBlanks<T extends Normalize.ChatMessage>(msgs: T[]): T[] {
    const out: T[] = [];

    // In local mode first turn expected to be user.
    let expected: Normalize.Role = "user";

    for (const m of msgs) {
      while (m.role !== expected) {
        // Insert blanks to match expected sequence user/assistant/user...
        out.push(Normalize.makeBlank(expected) as T);
        expected = expected === "user" ? "assistant" : "user";
      }

      out.push(m);
      expected = expected === "user" ? "assistant" : "user";
    }

    return out;
  }
}

module Normalize {
  export type Role = "user" | "assistant";

  export interface ChatMessage extends Record<any, any> {
    role: Role;
  }

  export const makeBlank = (role: Role): ChatMessage => ({
    role,
    content: ""
  });
}
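For reference, a short usage sketch of the new helper; the import path is illustrative, and the commented output is what the logic above produces for two consecutive user turns.

import { Utils } from "./utils"; // illustrative path

// Two user messages in a row: a blank assistant message is inserted between them
// so the history alternates user/assistant/user... as the local model expects.
const msgs = [
  { role: "user" as const, content: "Hi" },
  { role: "user" as const, content: "Are you still there?" },
];

const normalized = Utils.normalizeWithBlanks(msgs);
// [
//   { role: "user", content: "Hi" },
//   { role: "assistant", content: "" },            // inserted blank
//   { role: "user", content: "Are you still there?" },
// ]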