Remove file upload functionality and related components

The `FileUploadStore` and all file upload features have been removed, simplifying the chat interface. This eliminates unused code, including file-handling logic, attachment management, and the associated UI elements, and drops the now-unused `attachments` parameter from each provider chat SDK.
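
For reference, after this change every provider SDK converges on the same trimmed handler shape. The sketch below is reconstructed from the diffs in this commit and is illustrative only: `ExampleChatSdk` stands in for the real provider classes, and the declared `ChatSdk` helper signatures and `Env` type are assumptions, not the repository's actual definitions.

```typescript
// Illustrative sketch only. `ExampleChatSdk` stands in for CerebrasSdk,
// ClaudeChatSdk, GroqChatSdk, etc.; the `ChatSdk` and `Env` declarations
// below approximate helpers defined elsewhere in the repository.
import OpenAI from "openai";

type Env = Record<string, unknown>;

declare const ChatSdk: {
  buildAssistantPrompt(opts: { maxTokens?: number }): string;
  buildMessageChain(
    messages: any[],
    opts: {
      systemPrompt: any;
      model: string;
      assistantPrompt: string;
      toolResults: any;
    },
  ): any[];
};

export class ExampleChatSdk {
  static async handleStream(
    param: {
      openai: OpenAI;
      systemPrompt: any;
      preprocessedContext: any;
      maxTokens?: number;
      messages: any[];
      model: string;
      env: Env;
      // `attachments`, `tools`, and `disableWebhookGeneration` are gone.
    },
    dataCallback: (data: any) => void,
  ) {
    const { messages, maxTokens, systemPrompt, model, preprocessedContext } = param;

    // buildAssistantPrompt is now called without `tools`.
    const assistantPrompt = ChatSdk.buildAssistantPrompt({ maxTokens });

    // buildMessageChain is now called without `attachments`.
    const safeMessages = ChatSdk.buildMessageChain(messages, {
      systemPrompt,
      model,
      assistantPrompt,
      toolResults: preprocessedContext,
    });

    // Provider-specific client setup and streaming continue unchanged from here,
    // e.g. `new OpenAI({ ... })` producing a stream that feeds `dataCallback`.
  }
}
```

The per-file diffs below apply this same trimming to each provider SDK; lines prefixed with `-` were deleted.
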
geoffsee
2025-05-27 14:15:12 -04:00
committed by Geoff Seemueller
parent c04e19611e
commit 47272ba350
13 changed files with 105 additions and 302 deletions

View File

@@ -21,12 +21,10 @@ export class CerebrasSdk {
}>,
_NotCustomized
>;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
model: string;
env: Env;
- tools: any;
},
dataCallback: (data) => void,
) {
@@ -35,15 +33,12 @@ export class CerebrasSdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
@@ -51,7 +46,6 @@ export class CerebrasSdk {
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const openai = new OpenAI({

View File

@@ -49,7 +49,6 @@ export class ClaudeChatSdk {
param: {
openai: OpenAI;
systemPrompt: any;
- disableWebhookGeneration: boolean;
preprocessedContext: ModelSnapshotType2<
ModelPropertiesDeclarationToProperties<{
role: ISimpleType<UnionStringArray<string[]>>;
@@ -57,12 +56,10 @@ export class ClaudeChatSdk {
}>,
_NotCustomized
>;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
model: string;
env: Env;
- tools: any;
},
dataCallback: (data) => void,
) {
@@ -71,15 +68,12 @@ export class ClaudeChatSdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
@@ -87,7 +81,6 @@ export class ClaudeChatSdk {
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const anthropic = new Anthropic({

View File

@@ -13,7 +13,6 @@ export class CloudflareAISdk {
param: {
openai: OpenAI;
systemPrompt: any;
- disableWebhookGeneration: boolean;
preprocessedContext: ModelSnapshotType2<
ModelPropertiesDeclarationToProperties<{
role: ISimpleType<UnionStringArray<string[]>>;
@@ -21,12 +20,10 @@ export class CloudflareAISdk {
}>,
_NotCustomized
>;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
model: string;
env: Env;
- tools: any;
},
dataCallback: (data) => void,
) {
@@ -35,22 +32,18 @@ export class CloudflareAISdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
systemPrompt: systemPrompt,
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const cfAiURL = `https://api.cloudflare.com/client/v4/accounts/${env.CLOUDFLARE_ACCOUNT_ID}/ai/v1`;

View File

@@ -42,7 +42,6 @@ export class FireworksAiChatSdk {
param: {
openai: OpenAI;
systemPrompt: any;
- disableWebhookGeneration: boolean;
preprocessedContext: ModelSnapshotType2<
ModelPropertiesDeclarationToProperties<{
role: ISimpleType<UnionStringArray<string[]>>;
@@ -50,12 +49,10 @@ export class FireworksAiChatSdk {
}>,
_NotCustomized
>;
- attachments: any;
maxTokens: number;
messages: any;
model: any;
env: Env;
- tools: any;
},
dataCallback: (data) => void,
) {
@@ -64,15 +61,12 @@ export class FireworksAiChatSdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
@@ -80,7 +74,6 @@ export class FireworksAiChatSdk {
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const fireworksOpenAIClient = new OpenAI({

View File

@@ -12,15 +12,12 @@ export class GoogleChatSdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
@@ -28,7 +25,6 @@ export class GoogleChatSdk {
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const openai = new OpenAI({

View File

@@ -13,7 +13,6 @@ export class GroqChatSdk {
param: {
openai: OpenAI;
systemPrompt: any;
- disableWebhookGeneration: boolean;
preprocessedContext: ModelSnapshotType2<
ModelPropertiesDeclarationToProperties<{
role: ISimpleType<UnionStringArray<string[]>>;
@@ -21,12 +20,10 @@ export class GroqChatSdk {
}>,
_NotCustomized
>;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
model: string;
env: Env;
- tools: any;
},
dataCallback: (data) => void,
) {
@@ -35,22 +32,18 @@ export class GroqChatSdk {
messages,
env,
maxTokens,
- tools,
systemPrompt,
model,
- attachments,
} = param;
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
systemPrompt: systemPrompt,
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const openai = new OpenAI({

View File

@@ -7,12 +7,9 @@ export class OpenAiChatSdk {
openai: OpenAI;
systemPrompt: any;
preprocessedContext: any;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
- disableWebhookGeneration: boolean;
model: any;
- tools: any;
},
dataCallback: (data: any) => any,
) {
@@ -20,9 +17,7 @@ export class OpenAiChatSdk {
openai,
systemPrompt,
maxTokens,
- tools,
messages,
- attachments,
model,
preprocessedContext,
} = ctx;
@@ -33,14 +28,12 @@ export class OpenAiChatSdk {
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
systemPrompt: systemPrompt,
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
return OpenAiChatSdk.streamOpenAiResponse(

View File

@@ -7,13 +7,11 @@ export class XaiChatSdk {
openai: OpenAI;
systemPrompt: any;
preprocessedContext: any;
- attachments: any;
maxTokens: unknown | number | undefined;
messages: any;
- disableWebhookGeneration: boolean;
model: any;
env: Env;
tools: any;
},
dataCallback: (data: any) => any,
) {
@@ -21,7 +19,6 @@ export class XaiChatSdk {
openai,
systemPrompt,
maxTokens,
tools,
messages,
- attachments,
env,
@@ -49,7 +46,6 @@ export class XaiChatSdk {
const assistantPrompt = ChatSdk.buildAssistantPrompt({
maxTokens: maxTokens,
- tools: tools,
});
const safeMessages = ChatSdk.buildMessageChain(messages, {
@@ -57,7 +53,6 @@ export class XaiChatSdk {
model,
assistantPrompt,
toolResults: preprocessedContext,
- attachments: attachments,
});
const xAiClient = new OpenAI({