- Add killport.js script for terminating processes on specific ports (sketched below)

- Introduce `supportedModels` in `ClientChatStore` and update model validation logic
- Enhance OpenAI inferencing with local setup adaptations and improved streaming options (see the streaming sketch below)
- Modify ChatService to handle local and remote model fetching
- Update input menu to dynamically fetch and display supported models
- Add start_inference_server.sh for initiating local inference server
- Upgrade OpenAI SDK to v5.0.1 and adjust dependencies accordingly
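
The killport.js script itself is not among the hunks shown on this page. A minimal sketch of the idea, assuming a Unix-like environment where `lsof` is available (the committed script may well differ), is to resolve the PIDs listening on the port and signal them:

```js
// Hypothetical sketch of a killport.js — not the committed script.
// Usage: node killport.js 8080
import { execSync } from "node:child_process";

const port = Number(process.argv[2]);
if (!Number.isInteger(port)) {
  console.error("Usage: node killport.js <port>");
  process.exit(1);
}

try {
  // `lsof -ti :<port>` prints only the PIDs of processes bound to the port.
  const pids = execSync(`lsof -ti :${port}`, { encoding: "utf8" })
    .split("\n")
    .filter(Boolean);

  for (const pid of pids) {
    process.kill(Number(pid), "SIGTERM");
    console.log(`Sent SIGTERM to ${pid} (port ${port})`);
  }
} catch {
  // lsof exits non-zero when nothing is listening; treat that as "nothing to kill".
  console.log(`Nothing listening on port ${port}`);
}
```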
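The ChatService and inferencing changes live in other files of this commit; the general shape of pointing the upgraded openai SDK (v5.x) at a local, OpenAI-compatible server while keeping token streaming looks roughly like the sketch below. The base URL, environment variable, and flag names here are illustrative assumptions, not values taken from this repository:

```ts
import OpenAI from "openai";

// Assumption: a local OpenAI-compatible server (e.g. the one launched by
// start_inference_server.sh) is reachable on localhost; otherwise use the hosted API.
const useLocal = process.env.USE_LOCAL_INFERENCE === "true"; // hypothetical flag

const client = new OpenAI({
  // `undefined` falls back to the SDK default (https://api.openai.com/v1).
  baseURL: useLocal ? "http://localhost:8000/v1" : undefined,
  apiKey: useLocal ? "not-needed" : process.env.OPENAI_API_KEY,
});

export async function* streamChat(model: string, prompt: string) {
  const stream = await client.chat.completions.create({
    model,
    messages: [{ role: "user", content: prompt }],
    stream: true,
  });

  // With stream: true the SDK returns an async iterable of completion chunks.
  for await (const chunk of stream) {
    const delta = chunk.choices[0]?.delta?.content;
    if (delta) yield delta;
  }
}
```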
Author:    geoffsee
Date:      2025-05-29 19:28:54 -04:00
Committer: Geoff Seemueller
Parent:    c9ee7c7690
Commit:    cc0da17b5f

11 changed files with 204 additions and 23 deletions


@@ -53,7 +53,17 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
      setControlledOpen(isOpen);
    }, [isOpen]);
    const textModels = SUPPORTED_MODELS;
    const getSupportedModels = async () => {
      return await (await fetch("/api/models")).json();
    }
    useEffect(() => {
      getSupportedModels().then((supportedModels) => {
        ClientChatStore.setSupportedModels(supportedModels);
      });
    }, []);
    const handleClose = useCallback(() => {
      onClose();
@@ -75,9 +85,7 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
    }, [onClose]);
    async function selectModelFn({ name, value }) {
      if (getModelFamily(value)) {
        ClientChatStore.setModel(value);
      }
    }
    function isSelectedModelFn({ name, value }) {
@@ -144,7 +152,7 @@ const InputMenu: React.FC<{ isDisabled?: boolean }> = observer(
      >
        <FlyoutSubMenu
          title="Text Models"
          flyoutMenuOptions={textModels.map((m) => ({ name: m, value: m }))}
          flyoutMenuOptions={ClientChatStore.supportedModels.map((m) => ({ name: m, value: m }))}
          onClose={onClose}
          parentIsOpen={isOpen}
          setMenuState={setMenuState}
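
The hunks above read `ClientChatStore.supportedModels` and call `ClientChatStore.setSupportedModels(...)`; the store side of that change lives in another file of this commit. A minimal sketch of what it might look like, assuming a plain MobX class store (the real ClientChatStore may be organized differently, e.g. with mobx-state-tree):

```ts
import { makeAutoObservable } from "mobx";

class ClientChatStoreImpl {
  // Populated at runtime from GET /api/models (see the useEffect in the diff above).
  supportedModels: string[] = [];
  model = "";

  constructor() {
    makeAutoObservable(this);
  }

  setSupportedModels(models: string[]) {
    this.supportedModels = models;
  }

  setModel(model: string) {
    // Hypothetical validation: only accept a model the server reported as supported.
    if (this.supportedModels.length === 0 || this.supportedModels.includes(model)) {
      this.model = model;
    }
  }
}

export default new ClientChatStoreImpl();
```

Because `supportedModels` is observable and InputMenu is wrapped in `observer(...)`, the flyout re-renders on its own once the `/api/models` fetch resolves.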