Remove file upload functionality and related components

The `FileUploadStore` and all file-upload features were removed, simplifying the chat interface. This eliminates the upload and file-handling logic, attachment management in the chat store, and the related UI elements (the hidden file input, the attachment list, and the upload-error alert).
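
For reference, the request payload built in `ClientChatStore` (diffed below) now carries only messages, model, and tools; the `attachments` field and the implicit `user-attachments` tool are gone. A minimal TypeScript sketch of the resulting shape (the message fields here are an assumption for illustration; the real shape comes from the `Message` model):

    // Sketch only: message fields are assumed for illustration;
    // the actual shape is defined by the Message MST model.
    interface ChatPayload {
      messages: Array<{ role: string; content: string }>;
      model: string;
      tools: string[]; // no longer auto-populated with "user-attachments"
    }

    const examplePayload: ChatPayload = {
      messages: [{ role: "user", content: "Hello" }],
      model: "llama-3.3-70b-versatile",
      tools: [],
    };
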
This commit is contained in:
geoffsee
2025-05-27 14:01:41 -04:00
committed by Geoff Seemueller
parent 18ba4aec21
commit c04e19611e
12 changed files with 284 additions and 588 deletions

View File

@@ -1,15 +1,6 @@
 import React, {useEffect, useRef, useState} from "react";
 import {observer} from "mobx-react-lite";
-import {
-  Alert,
-  AlertIcon,
-  Box,
-  chakra,
-  HStack,
-  InputGroup,
-} from "@chakra-ui/react";
-import fileUploadStore from "../../stores/FileUploadStore";
-import { UploadedItem } from "./Attachments";
+import {Box, chakra, InputGroup,} from "@chakra-ui/react";
 import AutoResize from "react-textarea-autosize";
 const AutoResizeTextArea = chakra(AutoResize);
@@ -24,24 +15,6 @@ interface InputTextAreaProps {
 const InputTextArea: React.FC<InputTextAreaProps> = observer(
   ({ inputRef, value, onChange, onKeyDown, isLoading }) => {
-    const fileInputRef = useRef<HTMLInputElement>(null);
-    const handleAttachmentClick = () => {
-      if (fileInputRef.current) {
-        fileInputRef.current.click();
-      }
-    };
-    const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
-      const file = event.target.files?.[0];
-      if (file) {
-        fileUploadStore.uploadFile(file, "/api/documents");
-      }
-    };
-    const handleRemoveUploadedItem = (url: string) => {
-      fileUploadStore.removeUploadedFile(url);
-    };
     const [heightConstraint, setHeightConstraint] = useState<
       number | undefined
@@ -61,26 +34,6 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
       display="flex"
       flexDirection="column"
     >
-      {/* Attachments Section */}
-      {fileUploadStore.uploadResults.length > 0 && (
-        <HStack
-          spacing={2}
-          mb={2}
-          overflowX="auto"
-          css={{ "&::-webkit-scrollbar": { display: "none" } }}
-          // Ensure attachments wrap if needed
-          flexWrap="wrap"
-        >
-          {fileUploadStore.uploadResults.map((result) => (
-            <UploadedItem
-              key={result.url}
-              url={result.url}
-              name={result.name}
-              onRemove={() => handleRemoveUploadedItem(result.url)}
-            />
-          ))}
-        </HStack>
-      )}
       {/* Input Area */}
       <InputGroup position="relative">
@@ -97,7 +50,7 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
           pl="17px"
           bg="rgba(255, 255, 255, 0.15)"
           color="text.primary"
-          borderRadius="20px" // Set a consistent border radius
+          borderRadius="20px"
           border="none"
           placeholder="Free my mind..."
           _placeholder={{ color: "gray.400" }}
@@ -115,32 +68,7 @@ const InputTextArea: React.FC<InputTextAreaProps> = observer(
             transition: "height 0.2s ease-in-out",
           }}
         />
-        {/*<InputRightElement*/}
-        {/*  position="absolute"*/}
-        {/*  right={0}*/}
-        {/*  top={0}*/}
-        {/*  bottom={0}*/}
-        {/*  width="40px"*/}
-        {/*  height="100%"*/}
-        {/*  display="flex"*/}
-        {/*  alignItems="center"*/}
-        {/*  justifyContent="center"*/}
-        {/*>*/}
-        {/*<EnableSearchButton />*/}
-        {/*</InputRightElement>*/}
       </InputGroup>
-      <input
-        type="file"
-        ref={fileInputRef}
-        style={{ display: "none" }}
-        onChange={handleFileChange}
-      />
-      {fileUploadStore.uploadError && (
-        <Alert status="error" mt={2}>
-          <AlertIcon />
-          {fileUploadStore.uploadError}
-        </Alert>
-      )}
     </Box>
   );
 },

View File

@@ -1,3 +1,4 @@
+// runs before anything else
 import UserOptionsStore from "../stores/UserOptionsStore";
 UserOptionsStore.initialize();

View File

@@ -1,10 +1,10 @@
-// https://vike.dev/data
 import Routes from "../../src/renderer/routes";
 export { data };
 export type Data = Awaited<ReturnType<typeof data>>;
 import type { PageContextServer } from "vike/types";
+// sets the window title depending on the route
 const data = async (pageContext: PageContextServer) => {
   const getTitle = (path) => {
     return Routes[normalizePath(path)]?.heroLabel || "";

View File

@@ -1,3 +1,4 @@
+// client error catcher
 import { usePageContext } from "../../renderer/usePageContext";
 import { Center, Text } from "@chakra-ui/react";

View File

@@ -4,7 +4,7 @@ import Chat from "../../components/chat/Chat";
 import clientChatStore from "../../stores/ClientChatStore";
 import { getModelFamily } from "../../components/chat/SupportedModels";
-// renders for path: "/"
+// renders "/"
 export default function IndexPage() {
   useEffect(() => {
     try {

View File

@@ -1,10 +1,10 @@
-// https://vike.dev/onRenderClient
 export { onRenderClient };
 import React from "react";
 import { hydrateRoot } from "react-dom/client";
 import { Layout } from "../layout/Layout";
+// See https://vike.dev/onRenderClient for usage details
 async function onRenderClient(pageContext) {
   const { Page, pageProps } = pageContext;
   hydrateRoot(

View File

@@ -1,5 +1,4 @@
 import React from "react";
-// https://vike.dev/onRenderHtml
 export { onRenderHtml };
 import { renderToStream } from "react-streaming/server";
@@ -7,6 +6,7 @@ import { escapeInject } from "vike/server";
 import { Layout } from "../layout/Layout";
 import type { OnRenderHtmlAsync } from "vike/types";
+// See https://vike.dev/onRenderHtml for usage details
 const onRenderHtml: OnRenderHtmlAsync = async (
   pageContext,
 ): ReturnType<OnRenderHtmlAsync> => {
@@ -49,8 +49,6 @@ window.ga_api = "/api/metrics";
   return {
     documentHtml: res,
-    pageContext: {
-      // enableEagerStreaming: true
-    },
+    pageContext: {},
   };
 };

View File

@@ -1,3 +1,4 @@
+// Top level control interface for navigation
 export default {
   "/": { sidebarLabel: "Home", heroLabel: "g.s" },
   // "/about": { sidebarLabel: "About", heroLabel: "About Me" },

View File

@@ -1,4 +1,3 @@
-// renderer/types.ts
 export type { PageProps };
 type Page = (pageProps: PageProps) => React.ReactElement;
@@ -10,8 +9,6 @@ declare global {
     Page: Page;
     pageProps?: PageProps;
     fetch?: typeof fetch;
-    // Add your environment bindings here
     env: import("../../workers/site/env");
   }
 }

View File

@@ -1,6 +1,5 @@
 import { applySnapshot, flow, Instance, types } from "mobx-state-tree";
 import Message from "../models/Message";
-import Attachment from "../models/Attachment";
 import IntermediateStep from "../models/IntermediateStep";
 import UserOptionsStore from "./UserOptionsStore";
@@ -11,7 +10,6 @@ const ClientChatStore = types
     isLoading: types.optional(types.boolean, false),
     model: types.optional(types.string, "llama-3.3-70b-versatile"),
     imageModel: types.optional(types.string, "black-forest-labs/flux-1.1-pro"),
-    attachments: types.optional(types.array(Attachment), []),
     tools: types.optional(types.array(types.string), []),
     intermediateSteps: types.array(IntermediateStep),
   })
@@ -41,7 +39,6 @@ const ClientChatStore = types
       const payload = {
         messages: self.messages.slice(),
         model: self.model,
-        attachments: self.attachments.slice(),
         tools: self.tools.slice(),
       };
@@ -163,7 +160,6 @@ const ClientChatStore = types
       const payload = {
         messages: self.messages.slice(),
         model: self.model,
-        attachments: self.attachments.slice(),
         tools: self.tools.slice(),
       };
@@ -244,15 +240,6 @@ const ClientChatStore = types
     reset() {
       applySnapshot(self, {});
     },
-    addAttachment(attachment: Instance<typeof Attachment>) {
-      self.attachments.push(attachment);
-      if (self.attachments.length > 0) {
-        if (!self.tools.includes("user-attachments")) {
-          self.tools.push("user-attachments");
-        }
-      }
-    },
     addIntermediateStep(stepData) {
       return;
     },
@@ -271,21 +258,6 @@ const ClientChatStore = types
         self.messages.splice(index + 1);
       }
     },
-    removeAttachment(url: string) {
-      const f =
-        self.attachments.filter((attachment) => attachment.url !== url) ?? [];
-      self.attachments.clear();
-      self.attachments.push(...f);
-      if (self.attachments.length === 0) {
-        const remainingTools = self.tools.filter(
-          (tool) => tool !== "user-attachments",
-        );
-        self.tools.clear();
-        self.tools.push(...remainingTools);
-      }
-    },
     setTools(tools: string[]) {
       self.tools.clear();
       self.tools.push(...tools);

View File

@@ -1,67 +0,0 @@
-import { types, flow } from "mobx-state-tree";
-import clientChatStore from "./ClientChatStore";
-import Attachment from "../models/Attachment";
-const FileUploadStore = types
-  .model("FileUploadStore", {
-    isUploading: types.optional(types.boolean, false),
-    uploadError: types.maybeNull(types.string),
-    uploadedFiles: types.array(types.string),
-    uploadResults: types.array(types.frozen()),
-  })
-  .actions((self) => ({
-    uploadFile: flow(function* (file: File, endpoint: string) {
-      if (!endpoint) {
-        self.uploadError = "Endpoint URL is required.";
-        return;
-      }
-      self.isUploading = true;
-      self.uploadError = null;
-      const formData = new FormData();
-      formData.append("file", file);
-      try {
-        const response = yield fetch(endpoint, {
-          method: "POST",
-          body: formData,
-        });
-        if (!response.ok) {
-          throw new Error(`Upload failed with status: ${response.status}`);
-        }
-        const result = yield response.json();
-        self.uploadResults.push(result);
-        if (result.url) {
-          self.uploadedFiles.push(result.url);
-          clientChatStore.addAttachment(
-            Attachment.create({
-              content: `${file.name}\n~~~${result?.extractedText}\n`,
-              url: result.url,
-            }),
-          );
-        } else {
-          throw new Error("No URL returned from the server.");
-        }
-      } catch (error: any) {
-        self.uploadError = error.message;
-      } finally {
-        self.isUploading = false;
-      }
-    }),
-    removeUploadedFile(url: string) {
-      clientChatStore.removeAttachment(url);
-      const index = self.uploadedFiles.findIndex(
-        (uploadedUrl) => uploadedUrl === url,
-      );
-      if (index !== -1) {
-        self.uploadedFiles.splice(index, 1);
-        self.uploadResults.splice(index, 1);
-      }
-    },
-  }));
-export default FileUploadStore.create();

View File

@@ -21,28 +21,15 @@ export interface StreamParams {
   model: string;
   systemPrompt: string;
   preprocessedContext: any;
-  attachments: any[];
-  tools: any[];
-  disableWebhookGeneration: boolean;
   maxTokens: number;
 }
-interface StreamHandlerParams {
-  controller: ReadableStreamDefaultController;
-  encoder: TextEncoder;
-  webhook?: { url: string, payload: unknown };
-  dynamicContext?: any;
-}
 const activeStreamType = types.model({
   name: types.optional(types.string, ""),
   maxTokens: types.optional(types.number, 0),
   systemPrompt: types.optional(types.string, ""),
   model: types.optional(types.string, ""),
   messages: types.optional(types.array(types.frozen()), []),
-  attachments: types.optional(types.array(types.frozen()), []),
-  tools: types.optional(types.array(types.frozen()), []),
-  disableWebhookGeneration: types.optional(types.boolean, false)
 });
 const activeStreamsMap = types.map(
@@ -55,7 +42,7 @@ const ChatService = types
     openAIBaseURL: types.optional(types.string, ""),
     activeStreams: types.optional(
       activeStreamsMap,
-      {} // Correct initialization
+      {}
     ),
     maxTokens: types.number,
     systemPrompt: types.string
@@ -63,7 +50,6 @@ const ChatService = types
   .volatile(self => ({
     openai: {} as OpenAI,
     env: {} as Env,
-    webhookStreamActive: false
   }))
   .actions(self => {
     // Helper functions
@@ -87,22 +73,6 @@ const ChatService = types
       throw new Error('Unsupported message format');
     };
-    const handleAgentProcess = async (
-      {controller, encoder, webhook, dynamicContext}: StreamHandlerParams
-    ) => {
-      if (!webhook) return;
-      dynamicContext.append("\n## Agent Results\n~~~markdown\n");
-      for await (const chunk of self.streamAgentData({webhook})) {
-        controller.enqueue(encoder.encode(chunk));
-        dynamicContext.append(chunk);
-      }
-      dynamicContext.append("\n~~~\n");
-      ChatSdk.sendDoubleNewline(controller, encoder);
-    };
     const createStreamParams = async (
       streamConfig: any,
       dynamicContext: any,
@@ -115,9 +85,6 @@ const ChatService = types
         model: streamConfig.model,
         systemPrompt: streamConfig.systemPrompt,
         preprocessedContext: getSnapshot(dynamicContext),
-        attachments: streamConfig.attachments ?? [],
-        tools: streamConfig.tools ?? [],
-        disableWebhookGeneration: true,
         maxTokens: await durableObject.dynamicMaxTokens(
           streamConfig.messages,
           2000
@@ -152,9 +119,6 @@ const ChatService = types
         systemPrompt: stream?.systemPrompt || "",
         model: stream?.model || "",
         messages: stream?.messages || [],
-        attachments: stream?.attachments || [],
-        tools: stream?.tools || [],
-        disableWebhookGeneration: stream?.disableWebhookGeneration || false,
       };
       self.activeStreams.set(streamId, validStream);
@@ -180,68 +144,7 @@ const ChatService = types
       });
     },
-    setWebhookStreamActive(value) {
-      self.webhookStreamActive = value;
-    },
-    streamAgentData: async function* ({webhook}) {
-      console.log("streamAgentData::start");
-      if (self.webhookStreamActive) {
-        return
-      }
-      const queue: string[] = [];
-      let resolveQueueItem: Function;
-      let finished = false;
-      let errorOccurred: Error | null = null;
-      const dataPromise = () => new Promise<void>((resolve) => {
-        resolveQueueItem = resolve;
-      });
-      let currentPromise = dataPromise();
-      const eventSource = new EventSource(webhook.url.trim());
-      console.log("streamAgentData::setWebhookStreamActive::true");
-      self.setWebhookStreamActive(true)
-      try {
-        ChatSdk.handleAgentStream(eventSource, (data) => {
-          const formattedData = `data: ${JSON.stringify(data)}\n\n`;
-          queue.push(formattedData);
-          if (resolveQueueItem) resolveQueueItem();
-          currentPromise = dataPromise();
-        }).then(() => {
-          finished = true;
-          if (resolveQueueItem) resolveQueueItem();
-        }).catch((err) => {
-          console.log(`chatService::streamAgentData::STREAM_ERROR::${err}`);
-          errorOccurred = err;
-          if (resolveQueueItem) resolveQueueItem();
-        });
-        while (!finished || queue.length > 0) {
-          if (queue.length > 0) {
-            yield queue.shift()!;
-          } else if (errorOccurred) {
-            throw errorOccurred;
-          } else {
-            await currentPromise;
-          }
-        }
-        self.setWebhookStreamActive(false);
-        eventSource.close();
-        // console.log(`chatService::streamAgentData::complete`);
-      } catch (error) {
-        console.log(`chatService::streamAgentData::error`);
-        eventSource.close();
-        self.setWebhookStreamActive(false);
-        console.error("Error while streaming webhook data:", error);
-        throw error;
-      }
-    },
-    /**
-     * runModelHandler
-     * Selects the correct model handler and invokes it.
-     */
     async runModelHandler(params: {
       streamConfig: any;
       streamParams: any;
@@ -252,65 +155,35 @@ const ChatService = types
       const {streamConfig, streamParams, controller, encoder, streamId} = params;
       const modelFamily = getModelFamily(streamConfig.model);
       console.log(
         `chatService::handleSseStream::ReadableStream::modelFamily::${modelFamily}`
       );
       const handler = modelHandlers[modelFamily as ModelFamily];
       if (handler) {
         try {
           console.log(`chatService::handleSseStream::ReadableStream::${streamId}::handler::start`);
           await handler(streamParams, handleStreamData(controller, encoder));
           console.log(`chatService::handleSseStream::ReadableStream::${streamId}::handler::finish`);
         } catch (error) {
           const message = error.message.toLowerCase();
           if (message.includes("413 ") || (message.includes("maximum") || message.includes("too long") || message.includes("too large"))) {
-            throw new ClientError(`Error! Content length exceeds limits. Try shortening your message, removing any attached files, or editing an earlier message instead.`, 413, {model: streamConfig.model, maxTokens: streamParams.maxTokens})
-            // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
+            throw new ClientError(`Error! Content length exceeds limits. Try shortening your message or editing an earlier message.`, 413, {
+              model: streamConfig.model,
+              maxTokens: streamParams.maxTokens
+            })
           }
           if (message.includes("429 ")) {
-            throw new ClientError(`Error! Rate limit exceeded. Wait a few minutes before trying again.`, 429, {model: streamConfig.model, maxTokens: streamParams.maxTokens})
-            // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
+            throw new ClientError(`Error! Rate limit exceeded. Wait a few minutes before trying again.`, 429, {
+              model: streamConfig.model,
+              maxTokens: streamParams.maxTokens
+            })
           }
           if (message.includes("404")) {
             throw new ClientError(`Something went wrong, try again.`, 413, {})
-            // throw new Error(`Max tokens exceeded for model ${streamConfig.model}`)
           }
           throw error;
-          /*
-          '413 Request too large for model `mixtral-8x7b-32768` in organization `org_01htjxws48fm0rbbg5gnkgmbrh` service tier `on_demand` on tokens per minute (TPM): Limit 5000, Requested 49590, please reduce your message size and try again. Visit https://console.groq.com/docs/rate-limits for more information.'
-          */
         }
       }
     },
-    /**
-     * bootstrapAgents
-     * Checks if an agent exists, and if so, bootstraps it.
-     */
-    async bootstrapAgents(params: {
-      savedStreamConfig: string;
-      controller: ReadableStreamDefaultController;
-      encoder: TextEncoder;
-      dynamicContext: any; // or more specific type
-    }) {
-      const {savedStreamConfig, controller, encoder, dynamicContext} = params;
-      const config = JSON.parse(savedStreamConfig);
-      const webhook = config?.webhooks?.[0];
-      if (webhook) {
-        console.log(`chatService::handleSseStream::ReadableStream::webhook:start`);
-        await handleAgentProcess({
-          controller,
-          encoder,
-          webhook,
-          dynamicContext,
-        });
-        console.log(`chatService::handleSseStream::ReadableStream::webhook::end`);
-      }
-    },
     createSseReadableStream(params: {
       streamId: string;
@@ -322,21 +195,11 @@
       return new ReadableStream({
         async start(controller) {
           console.log(`chatService::handleSseStream::ReadableStream::${streamId}::open`);
           const encoder = new TextEncoder();
           try {
             const dynamicContext = Message.create(streamConfig.preprocessedContext);
-            // Process agents if configured
-            await self.bootstrapAgents({
-              savedStreamConfig,
-              controller,
-              encoder,
-              dynamicContext: dynamicContext,
-            });
             // Process the stream data using the appropriate handler
             const streamParams = await createStreamParams(
               streamConfig,
@@ -366,14 +229,18 @@ const ChatService = types
             );
           } else {
             controller.enqueue(
-              encoder.encode(`data: ${JSON.stringify({ type: 'error', error: "Server error" })}\n\n`)
+              encoder.encode(`data: ${JSON.stringify({
+                type: 'error',
+                error: "Server error"
+              })}\n\n`)
             );
           }
-          controller.close();
         } finally {
           try {
             controller.close();
-          } catch (_) {}
+          } catch (_) {
+          }
         }
       },
     });
@@ -385,7 +252,6 @@
     // Check if a stream is already active for this ID
     if (self.activeStreams.has(streamId)) {
-      console.log(`chatService::handleSseStream::${streamId}::[stream already active]`);
       return new Response('Stream already active', {status: 409});
     }
@@ -399,7 +265,6 @@
     }
     const streamConfig = JSON.parse(savedStreamConfig);
-    console.log(`chatService::handleSseStream::${streamId}::[stream configured]`);
     const stream = self.createSseReadableStream({
       streamId,