Remove file upload functionality and related components

The `FileUploadStore` and all file upload features were removed, simplifying the chat interface. This eliminates the now-unused file handling logic, attachment management, and related UI elements.
geoffsee
2025-05-27 14:01:41 -04:00
committed by Geoff Seemueller
parent 18ba4aec21
commit c04e19611e
12 changed files with 284 additions and 588 deletions
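
Net effect on the wire: chat requests no longer carry an `attachments` array, and the `user-attachments` tool is never injected. A minimal sketch of the simplified payload, with field names and the default model taken from the `ClientChatStore` diff below; the endpoint path and message shape are assumptions for illustration only:

// Hypothetical sketch — not part of this commit.
interface ChatPayload {
  messages: { role: string; content: string }[];
  model: string;
  tools: string[];
}

const payload: ChatPayload = {
  messages: [{ role: "user", content: "Hello" }],
  model: "llama-3.3-70b-versatile", // default from ClientChatStore
  tools: [], // "user-attachments" is no longer added here
};

// Endpoint path assumed for illustration.
await fetch("/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(payload),
});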

View File

@@ -1,15 +1,6 @@
-import React, { useEffect, useRef, useState } from "react";
-import { observer } from "mobx-react-lite";
-import {
-  Alert,
-  AlertIcon,
-  Box,
-  chakra,
-  HStack,
-  InputGroup,
-} from "@chakra-ui/react";
-import fileUploadStore from "../../stores/FileUploadStore";
-import { UploadedItem } from "./Attachments";
+import React, {useEffect, useRef, useState} from "react";
+import {observer} from "mobx-react-lite";
+import {Box, chakra, InputGroup,} from "@chakra-ui/react";
 import AutoResize from "react-textarea-autosize";

 const AutoResizeTextArea = chakra(AutoResize);
@@ -24,24 +15,6 @@ interface InputTextAreaProps {
 const InputTextArea: React.FC<InputTextAreaProps> = observer(
   ({ inputRef, value, onChange, onKeyDown, isLoading }) => {
-    const fileInputRef = useRef<HTMLInputElement>(null);
-
-    const handleAttachmentClick = () => {
-      if (fileInputRef.current) {
-        fileInputRef.current.click();
-      }
-    };
-
-    const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
-      const file = event.target.files?.[0];
-      if (file) {
-        fileUploadStore.uploadFile(file, "/api/documents");
-      }
-    };
-
-    const handleRemoveUploadedItem = (url: string) => {
-      fileUploadStore.removeUploadedFile(url);
-    };
-
     const [heightConstraint, setHeightConstraint] = useState<
       number | undefined
@@ -61,26 +34,6 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
         display="flex"
         flexDirection="column"
       >
-        {/* Attachments Section */}
-        {fileUploadStore.uploadResults.length > 0 && (
-          <HStack
-            spacing={2}
-            mb={2}
-            overflowX="auto"
-            css={{ "&::-webkit-scrollbar": { display: "none" } }}
-            // Ensure attachments wrap if needed
-            flexWrap="wrap"
-          >
-            {fileUploadStore.uploadResults.map((result) => (
-              <UploadedItem
-                key={result.url}
-                url={result.url}
-                name={result.name}
-                onRemove={() => handleRemoveUploadedItem(result.url)}
-              />
-            ))}
-          </HStack>
-        )}
         {/* Input Area */}
         <InputGroup position="relative">
@@ -97,7 +50,7 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
             pl="17px"
             bg="rgba(255, 255, 255, 0.15)"
             color="text.primary"
-            borderRadius="20px" // Set a consistent border radius
+            borderRadius="20px"
             border="none"
             placeholder="Free my mind..."
             _placeholder={{ color: "gray.400" }}
@@ -115,32 +68,7 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
               transition: "height 0.2s ease-in-out",
             }}
           />
-          {/*<InputRightElement*/}
-          {/*  position="absolute"*/}
-          {/*  right={0}*/}
-          {/*  top={0}*/}
-          {/*  bottom={0}*/}
-          {/*  width="40px"*/}
-          {/*  height="100%"*/}
-          {/*  display="flex"*/}
-          {/*  alignItems="center"*/}
-          {/*  justifyContent="center"*/}
-          {/*>*/}
-          {/*<EnableSearchButton />*/}
-          {/*</InputRightElement>*/}
         </InputGroup>
-        <input
-          type="file"
-          ref={fileInputRef}
-          style={{ display: "none" }}
-          onChange={handleFileChange}
-        />
-        {fileUploadStore.uploadError && (
-          <Alert status="error" mt={2}>
-            <AlertIcon />
-            {fileUploadStore.uploadError}
-          </Alert>
-        )}
       </Box>
     );
   },

View File

@@ -1,3 +1,4 @@
+// runs before anything else
 import UserOptionsStore from "../stores/UserOptionsStore";

 UserOptionsStore.initialize();

View File

@@ -1,10 +1,10 @@
-// https://vike.dev/data
 import Routes from "../../src/renderer/routes";

 export { data };
 export type Data = Awaited<ReturnType<typeof data>>;
 import type { PageContextServer } from "vike/types";

+// sets the window title depending on the route
 const data = async (pageContext: PageContextServer) => {
   const getTitle = (path) => {
     return Routes[normalizePath(path)]?.heroLabel || "";

View File

@@ -1,3 +1,4 @@
+// client error catcher
 import { usePageContext } from "../../renderer/usePageContext";
 import { Center, Text } from "@chakra-ui/react";


View File

@@ -4,7 +4,7 @@ import Chat from "../../components/chat/Chat";
 import clientChatStore from "../../stores/ClientChatStore";
 import { getModelFamily } from "../../components/chat/SupportedModels";

-// renders for path: "/"
+// renders "/"
 export default function IndexPage() {
   useEffect(() => {
     try {

View File

@@ -1,10 +1,10 @@
-// https://vike.dev/onRenderClient
 export { onRenderClient };

 import React from "react";
 import { hydrateRoot } from "react-dom/client";
 import { Layout } from "../layout/Layout";

+// See https://vike.dev/onRenderClient for usage details
 async function onRenderClient(pageContext) {
   const { Page, pageProps } = pageContext;
   hydrateRoot(
@@ -13,4 +13,4 @@ async function onRenderClient(pageContext) {
       <Page {...pageProps} />
     </Layout>,
   );
 }

View File

@@ -1,5 +1,4 @@
 import React from "react";
-// https://vike.dev/onRenderHtml
 export { onRenderHtml };

 import { renderToStream } from "react-streaming/server";
@@ -7,6 +6,7 @@ import { escapeInject } from "vike/server";
 import { Layout } from "../layout/Layout";
 import type { OnRenderHtmlAsync } from "vike/types";

+// See https://vike.dev/onRenderHtml for usage details
 const onRenderHtml: OnRenderHtmlAsync = async (
   pageContext,
 ): ReturnType<OnRenderHtmlAsync> => {
@@ -49,8 +49,6 @@ window.ga_api = "/api/metrics";
   return {
     documentHtml: res,
-    pageContext: {
-      // enableEagerStreaming: true
-    },
+    pageContext: {},
   };
 };

View File

@@ -1,3 +1,4 @@
+// Top level control interface for navigation
 export default {
   "/": { sidebarLabel: "Home", heroLabel: "g.s" },
   // "/about": { sidebarLabel: "About", heroLabel: "About Me" },

View File

@@ -1,4 +1,3 @@
-// renderer/types.ts
 export type { PageProps };

 type Page = (pageProps: PageProps) => React.ReactElement;
@@ -10,8 +9,6 @@ declare global {
     Page: Page;
     pageProps?: PageProps;
     fetch?: typeof fetch;
-    // Add your environment bindings here
-
     env: import("../../workers/site/env");
   }
 }

View File

@@ -1,6 +1,5 @@
 import { applySnapshot, flow, Instance, types } from "mobx-state-tree";
 import Message from "../models/Message";
-import Attachment from "../models/Attachment";
 import IntermediateStep from "../models/IntermediateStep";
 import UserOptionsStore from "./UserOptionsStore";

@@ -11,7 +10,6 @@ const ClientChatStore = types
     isLoading: types.optional(types.boolean, false),
     model: types.optional(types.string, "llama-3.3-70b-versatile"),
     imageModel: types.optional(types.string, "black-forest-labs/flux-1.1-pro"),
-    attachments: types.optional(types.array(Attachment), []),
     tools: types.optional(types.array(types.string), []),
     intermediateSteps: types.array(IntermediateStep),
   })
@@ -41,7 +39,6 @@ const ClientChatStore = types
       const payload = {
         messages: self.messages.slice(),
         model: self.model,
-        attachments: self.attachments.slice(),
         tools: self.tools.slice(),
       };

@@ -163,7 +160,6 @@ const ClientChatStore = types
       const payload = {
         messages: self.messages.slice(),
         model: self.model,
-        attachments: self.attachments.slice(),
         tools: self.tools.slice(),
       };

@@ -244,15 +240,6 @@ const ClientChatStore = types
     reset() {
       applySnapshot(self, {});
     },
-    addAttachment(attachment: Instance<typeof Attachment>) {
-      self.attachments.push(attachment);
-      if (self.attachments.length > 0) {
-        if (!self.tools.includes("user-attachments")) {
-          self.tools.push("user-attachments");
-        }
-      }
-    },
     addIntermediateStep(stepData) {
       return;
     },
@@ -271,21 +258,6 @@ const ClientChatStore = types
         self.messages.splice(index + 1);
       }
     },
-    removeAttachment(url: string) {
-      const f =
-        self.attachments.filter((attachment) => attachment.url !== url) ?? [];
-      self.attachments.clear();
-      self.attachments.push(...f);
-      if (self.attachments.length === 0) {
-        const remainingTools = self.tools.filter(
-          (tool) => tool !== "user-attachments",
-        );
-        self.tools.clear();
-        self.tools.push(...remainingTools);
-      }
-    },
     setTools(tools: string[]) {
       self.tools.clear();
       self.tools.push(...tools);

View File

@@ -1,67 +0,0 @@
-import { types, flow } from "mobx-state-tree";
-import clientChatStore from "./ClientChatStore";
-import Attachment from "../models/Attachment";
-
-const FileUploadStore = types
-  .model("FileUploadStore", {
-    isUploading: types.optional(types.boolean, false),
-    uploadError: types.maybeNull(types.string),
-    uploadedFiles: types.array(types.string),
-    uploadResults: types.array(types.frozen()),
-  })
-  .actions((self) => ({
-    uploadFile: flow(function* (file: File, endpoint: string) {
-      if (!endpoint) {
-        self.uploadError = "Endpoint URL is required.";
-        return;
-      }
-      self.isUploading = true;
-      self.uploadError = null;
-
-      const formData = new FormData();
-      formData.append("file", file);
-
-      try {
-        const response = yield fetch(endpoint, {
-          method: "POST",
-          body: formData,
-        });
-        if (!response.ok) {
-          throw new Error(`Upload failed with status: ${response.status}`);
-        }
-        const result = yield response.json();
-        self.uploadResults.push(result);
-        if (result.url) {
-          self.uploadedFiles.push(result.url);
-          clientChatStore.addAttachment(
-            Attachment.create({
-              content: `${file.name}\n~~~${result?.extractedText}\n`,
-              url: result.url,
-            }),
-          );
-        } else {
-          throw new Error("No URL returned from the server.");
-        }
-      } catch (error: any) {
-        self.uploadError = error.message;
-      } finally {
-        self.isUploading = false;
-      }
-    }),
-    removeUploadedFile(url: string) {
-      clientChatStore.removeAttachment(url);
-      const index = self.uploadedFiles.findIndex(
-        (uploadedUrl) => uploadedUrl === url,
-      );
-      if (index !== -1) {
-        self.uploadedFiles.splice(index, 1);
-        self.uploadResults.splice(index, 1);
-      }
-    },
-  }));
-
-export default FileUploadStore.create();

View File

@@ -15,34 +15,21 @@ import {CerebrasSdk} from "../sdk/models/cerebras";
 import {CloudflareAISdk} from "../sdk/models/cloudflareAi";

 export interface StreamParams {
   env: Env;
   openai: OpenAI;
   messages: any[];
   model: string;
   systemPrompt: string;
   preprocessedContext: any;
-  attachments: any[];
-  tools: any[];
-  disableWebhookGeneration: boolean;
   maxTokens: number;
 }

-interface StreamHandlerParams {
-  controller: ReadableStreamDefaultController;
-  encoder: TextEncoder;
-  webhook?: { url: string, payload: unknown };
-  dynamicContext?: any;
-}
-
 const activeStreamType = types.model({
   name: types.optional(types.string, ""),
   maxTokens: types.optional(types.number, 0),
   systemPrompt: types.optional(types.string, ""),
   model: types.optional(types.string, ""),
   messages: types.optional(types.array(types.frozen()), []),
-  attachments: types.optional(types.array(types.frozen()), []),
-  tools: types.optional(types.array(types.frozen()), []),
-  disableWebhookGeneration: types.optional(types.boolean, false)
 });

 const activeStreamsMap = types.map(
@@ -51,388 +38,266 @@ const activeStreamsMap = types.map(
 const ChatService = types
   .model('ChatService', {
     openAIApiKey: types.optional(types.string, ""),
     openAIBaseURL: types.optional(types.string, ""),
     activeStreams: types.optional(
       activeStreamsMap,
-      {} // Correct initialization
+      {}
     ),
     maxTokens: types.number,
     systemPrompt: types.string
   })
   .volatile(self => ({
     openai: {} as OpenAI,
     env: {} as Env,
-    webhookStreamActive: false
   }))
   .actions(self => {
     // Helper functions
     const createMessageInstance = (message: any) => {
       if (typeof message.content === 'string') {
         return Message.create({
           role: message.role,
           content: message.content,
         });
       }
       if (Array.isArray(message.content)) {
         const m = O1Message.create({
           role: message.role,
           content: message.content.map(item => ({
             type: item.type,
             text: item.text
           })),
         });
         return m;
       }
       throw new Error('Unsupported message format');
     };

-    const handleAgentProcess = async (
-      {controller, encoder, webhook, dynamicContext}: StreamHandlerParams
-    ) => {
-      if (!webhook) return;
-      dynamicContext.append("\n## Agent Results\n~~~markdown\n");
-      for await (const chunk of self.streamAgentData({webhook})) {
-        controller.enqueue(encoder.encode(chunk));
-        dynamicContext.append(chunk);
-      }
-      dynamicContext.append("\n~~~\n");
-      ChatSdk.sendDoubleNewline(controller, encoder);
-    };
-
     const createStreamParams = async (
       streamConfig: any,
       dynamicContext: any,
       durableObject: any
     ): Promise<StreamParams> => {
       return {
         env: self.env,
         openai: self.openai,
         messages: streamConfig.messages.map(createMessageInstance),
         model: streamConfig.model,
         systemPrompt: streamConfig.systemPrompt,
         preprocessedContext: getSnapshot(dynamicContext),
-        attachments: streamConfig.attachments ?? [],
-        tools: streamConfig.tools ?? [],
-        disableWebhookGeneration: true,
         maxTokens: await durableObject.dynamicMaxTokens(
           streamConfig.messages,
           2000
         ),
       }
     };

     const modelHandlers = {
       openai: (params: StreamParams, dataHandler: Function) =>
         OpenAiChatSdk.handleOpenAiStream(params, dataHandler),
       groq: (params: StreamParams, dataHandler: Function) =>
         GroqChatSdk.handleGroqStream(params, dataHandler),
       claude: (params: StreamParams, dataHandler: Function) =>
         ClaudeChatSdk.handleClaudeStream(params, dataHandler),
       fireworks: (params: StreamParams, dataHandler: Function) =>
         FireworksAiChatSdk.handleFireworksStream(params, dataHandler),
       google: (params: StreamParams, dataHandler: Function) =>
         GoogleChatSdk.handleGoogleStream(params, dataHandler),
       xai: (params: StreamParams, dataHandler: Function) =>
         XaiChatSdk.handleXaiStream(params, dataHandler),
       cerebras: (params: StreamParams, dataHandler: Function) =>
         CerebrasSdk.handleCerebrasStream(params, dataHandler),
       cloudflareAI: (params: StreamParams, dataHandler: Function) =>
         CloudflareAISdk.handleCloudflareAIStream(params, dataHandler)
     };

     return {
       setActiveStream(streamId: string, stream: any) {
         const validStream = {
           name: stream?.name || "Unnamed Stream",
           maxTokens: stream?.maxTokens || 0,
           systemPrompt: stream?.systemPrompt || "",
           model: stream?.model || "",
           messages: stream?.messages || [],
-          attachments: stream?.attachments || [],
-          tools: stream?.tools || [],
-          disableWebhookGeneration: stream?.disableWebhookGeneration || false,
         };

         self.activeStreams.set(streamId, validStream);
       },

       removeActiveStream(streamId: string) {
         self.activeStreams.delete(streamId);
       },
       setEnv(env: Env) {
         self.env = env;
         self.openai = new OpenAI({
           apiKey: self.openAIApiKey,
           baseURL: self.openAIBaseURL,
         });
       },

       handleChatRequest: async (request: Request) => {
         return ChatSdk.handleChatRequest(request, {
           openai: self.openai,
           env: self.env,
           systemPrompt: self.systemPrompt,
           maxTokens: self.maxTokens
         });
       },
-      setWebhookStreamActive(value) {
-        self.webhookStreamActive = value;
-      },
-      streamAgentData: async function* ({webhook}) {
-        console.log("streamAgentData::start");
-        if (self.webhookStreamActive) {
-          return
-        }
-        const queue: string[] = [];
-        let resolveQueueItem: Function;
-        let finished = false;
-        let errorOccurred: Error | null = null;
-
-        const dataPromise = () => new Promise<void>((resolve) => {
-          resolveQueueItem = resolve;
-        });
-        let currentPromise = dataPromise();
-        const eventSource = new EventSource(webhook.url.trim());
-        console.log("streamAgentData::setWebhookStreamActive::true");
-        self.setWebhookStreamActive(true)
-        try {
-          ChatSdk.handleAgentStream(eventSource, (data) => {
-            const formattedData = `data: ${JSON.stringify(data)}\n\n`;
-            queue.push(formattedData);
-            if (resolveQueueItem) resolveQueueItem();
-            currentPromise = dataPromise();
-          }).then(() => {
-            finished = true;
-            if (resolveQueueItem) resolveQueueItem();
-          }).catch((err) => {
-            console.log(`chatService::streamAgentData::STREAM_ERROR::${err}`);
-            errorOccurred = err;
-            if (resolveQueueItem) resolveQueueItem();
-          });
-          while (!finished || queue.length > 0) {
-            if (queue.length > 0) {
-              yield queue.shift()!;
-            } else if (errorOccurred) {
-              throw errorOccurred;
-            } else {
-              await currentPromise;
-            }
-          }
-          self.setWebhookStreamActive(false);
-          eventSource.close();
-          // console.log(`chatService::streamAgentData::complete`);
-        } catch (error) {
-          console.log(`chatService::streamAgentData::error`);
-          eventSource.close();
-          self.setWebhookStreamActive(false);
-          console.error("Error while streaming webhook data:", error);
-          throw error;
-        }
-      },
-      /**
-       * runModelHandler
-       * Selects the correct model handler and invokes it.
-       */
       async runModelHandler(params: {
         streamConfig: any;
         streamParams: any;
         controller: ReadableStreamDefaultController;
         encoder: TextEncoder;
         streamId: string;
       }) {
         const {streamConfig, streamParams, controller, encoder, streamId} = params;
         const modelFamily = getModelFamily(streamConfig.model);
-        console.log(
-          `chatService::handleSseStream::ReadableStream::modelFamily::${modelFamily}`
-        );

         const handler = modelHandlers[modelFamily as ModelFamily];
         if (handler) {
           try {
-            console.log(`chatService::handleSseStream::ReadableStream::${streamId}::handler::start`);
             await handler(streamParams, handleStreamData(controller, encoder));
-            console.log(`chatService::handleSseStream::ReadableStream::${streamId}::handler::finish`);
           } catch (error) {
             const message = error.message.toLowerCase();
-            if(message.includes("413 ") || (message.includes("maximum") || message.includes("too long") || message.includes("too large") )) {
-              throw new ClientError(`Error! Content length exceeds limits. Try shortening your message, removing any attached files, or editing an earlier message instead.`, 413, {model: streamConfig.model, maxTokens: streamParams.maxTokens})
-              // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
-            }
-            if(message.includes("429 ")) {
-              throw new ClientError(`Error! Rate limit exceeded. Wait a few minutes before trying again.`, 429, {model: streamConfig.model, maxTokens: streamParams.maxTokens})
-              // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
-            }
+            if (message.includes("413 ") || (message.includes("maximum") || message.includes("too long") || message.includes("too large"))) {
+              throw new ClientError(`Error! Content length exceeds limits. Try shortening your message or editing an earlier message.`, 413, {
+                model: streamConfig.model,
+                maxTokens: streamParams.maxTokens
+              })
+            }
+            if (message.includes("429 ")) {
+              throw new ClientError(`Error! Rate limit exceeded. Wait a few minutes before trying again.`, 429, {
+                model: streamConfig.model,
+                maxTokens: streamParams.maxTokens
+              })
+            }
             if (message.includes("404")) {
               throw new ClientError(`Something went wrong, try again.`, 413, {})
-              // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
             }
             throw error;
-            /*
-            '413 Request too large for model `mixtral-8x7b-32768` in organization `org_01htjxws48fm0rbbg5gnkgmbrh` service tier `on_demand` on tokens per minute (TPM): Limit 5000, Requested 49590, please reduce your message size and try again. Visit https://console.groq.com/docs/rate-limits for more information.'
-            */
           }
         }
       },
-      /**
-       * bootstrapAgents
-       * Checks if an agent exists, and if so, bootstraps it.
-       */
-      async bootstrapAgents(params: {
-        savedStreamConfig: string;
-        controller: ReadableStreamDefaultController;
-        encoder: TextEncoder;
-        dynamicContext: any; // or more specific type
-      }) {
-        const {savedStreamConfig, controller, encoder, dynamicContext} = params;
-        const config = JSON.parse(savedStreamConfig);
-        const webhook = config?.webhooks?.[0];
-        if (webhook) {
-          console.log(`chatService::handleSseStream::ReadableStream::webhook:start`);
-          await handleAgentProcess({
-            controller,
-            encoder,
-            webhook,
-            dynamicContext,
-          });
-          console.log(`chatService::handleSseStream::ReadableStream::webhook::end`);
-        }
-      },

       createSseReadableStream(params: {
         streamId: string;
         streamConfig: any;
         savedStreamConfig: string;
         durableObject: any;
       }) {
-        const { streamId, streamConfig, savedStreamConfig, durableObject } = params;
+        const {streamId, streamConfig, savedStreamConfig, durableObject} = params;

         return new ReadableStream({
           async start(controller) {
-            console.log(`chatService::handleSseStream::ReadableStream::${streamId}::open`);
             const encoder = new TextEncoder();

             try {
               const dynamicContext = Message.create(streamConfig.preprocessedContext);

-              // Process agents if configured
-              await self.bootstrapAgents({
-                savedStreamConfig,
-                controller,
-                encoder,
-                dynamicContext: dynamicContext,
-              });
-
               // Process the stream data using the appropriate handler
               const streamParams = await createStreamParams(
                 streamConfig,
                 dynamicContext,
                 durableObject
               );

               try {
                 await self.runModelHandler({
                   streamConfig,
                   streamParams,
                   controller,
                   encoder,
                   streamId,
                 });
               } catch (e) {
                 console.log("error caught at runModelHandler")
                 throw e;
               }
             } catch (error) {
               console.error(`chatService::handleSseStream::${streamId}::Error`, error);
-              if(error instanceof ClientError) {
+              if (error instanceof ClientError) {
                 controller.enqueue(
-                  encoder.encode(`data: ${JSON.stringify({ type: 'error', error: error.message })}\n\n`)
+                  encoder.encode(`data: ${JSON.stringify({type: 'error', error: error.message})}\n\n`)
                 );
               } else {
                 controller.enqueue(
-                  encoder.encode(`data: ${JSON.stringify({ type: 'error', error: "Server error" })}\n\n`)
+                  encoder.encode(`data: ${JSON.stringify({
+                    type: 'error',
+                    error: "Server error"
+                  })}\n\n`)
                 );
               }
               controller.close();
             } finally {
               try {
                 controller.close();
-              } catch (_) {}
+              } catch (_) {
+              }
             }
           },
         });
       },

       handleSseStream: flow(function* (streamId: string): Generator<Promise<string>, Response, unknown> {
         console.log(`chatService::handleSseStream::enter::${streamId}`);

         // Check if a stream is already active for this ID
         if (self.activeStreams.has(streamId)) {
-          console.log(`chatService::handleSseStream::${streamId}::[stream already active]`);
-          return new Response('Stream already active', { status: 409 });
+          return new Response('Stream already active', {status: 409});
         }

         // Retrieve the stream configuration from the durable object
         const objectId = self.env.SITE_COORDINATOR.idFromName('stream-index');
         const durableObject = self.env.SITE_COORDINATOR.get(objectId);
         const savedStreamConfig = yield durableObject.getStreamData(streamId);

         if (!savedStreamConfig) {
-          return new Response('Stream not found', { status: 404 });
+          return new Response('Stream not found', {status: 404});
         }

         const streamConfig = JSON.parse(savedStreamConfig);
-        console.log(`chatService::handleSseStream::${streamId}::[stream configured]`);

         const stream = self.createSseReadableStream({
           streamId,
           streamConfig,
           savedStreamConfig,
           durableObject,
         });

         // Use `tee()` to create two streams: one for processing and one for the response
         const [processingStream, responseStream] = stream.tee();

         self.setActiveStream(streamId, {
           ...streamConfig,
         });

         processingStream.pipeTo(
           new WritableStream({
             close() {
               self.removeActiveStream(streamId);
             },
           })
         );

         // Return the second stream as the response
         return new Response(responseStream, {
           headers: {
             'Content-Type': 'text/event-stream',
             'Cache-Control': 'no-cache',
             'Connection': 'keep-alive',
           },
         });
       }),
     };
   });
@@ -441,28 +306,28 @@
  * A custom construct for sending client-friendly errors via the controller in a structured and controlled manner.
  */
 export class ClientError extends Error {
   public statusCode: number;
   public details: Record<string, any>;

   constructor(message: string, statusCode: number, details: Record<string, any> = {}) {
     super(message);
     this.name = 'ClientError';
     this.statusCode = statusCode;
     this.details = details;
     Object.setPrototypeOf(this, ClientError.prototype);
   }

   /**
    * Formats the error for SSE-compatible data transmission.
    */
   public formatForSSE(): string {
     return JSON.stringify({
       type: 'error',
       message: this.message,
       details: this.details,
       statusCode: this.statusCode,
     });
   }
 }

 export default ChatService;
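
For illustration, a minimal sketch of the two patterns `handleSseStream` relies on: splitting the SSE stream with `tee()` so one branch can be drained for cleanup while the other becomes the HTTP response, and emitting a `ClientError` as a structured SSE event via `formatForSSE()`. Only `ClientError` and its constructor signature come from the code above; the stream contents and the cleanup hook are assumptions.

// Sketch only — mirrors the tee()/cleanup and structured-error patterns above.
const encoder = new TextEncoder();

const stream = new ReadableStream<Uint8Array>({
  start(controller) {
    // A hypothetical failure, surfaced to the client as a structured SSE event.
    const err = new ClientError("Rate limit exceeded.", 429, { model: "example-model" });
    controller.enqueue(encoder.encode(`data: ${err.formatForSSE()}\n\n`));
    controller.close();
  },
});

// One branch is drained for bookkeeping; the other is returned to the client.
const [processingStream, responseStream] = stream.tee();

processingStream.pipeTo(
  new WritableStream({
    close() {
      // e.g. remove this stream from an active-streams registry (assumed hook)
    },
  }),
);

const response = new Response(responseStream, {
  headers: {
    "Content-Type": "text/event-stream",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
  },
});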