Refactor ServerCoordinator and project structure for clearer durable objects organization and module imports.

This commit is contained in:
geoffsee
2025-06-18 15:53:17 -04:00
parent afc46fe2c3
commit 02ede2b0f6
7 changed files with 31 additions and 14 deletions

View File

@@ -6,10 +6,8 @@
<img src="https://github.com/user-attachments/assets/620d2517-e7be-4bb0-b2b7-3aa0cba37ef0" width="250" />
</p>
> **Note**: I am porting logic from 3 projects back into this one. The styling is a work in progress and some functionality
> may be broken. Tests are being actively ported and stability will improve over time. Thank you for your patience.
This is a full-stack Conversational AI. It runs on Cloudflare or Bun.
This is a full-stack Conversational AI.
## Table of Contents
@@ -22,7 +20,6 @@ This is a full-stack Conversational AI. It runs on Cloudflare or Bun.
- [Adding models](#adding-models-for-local-inference-ollama)
- [Testing](#testing)
- [Troubleshooting](#troubleshooting)
- [History](#history)
- [Acknowledgments](#acknowledgments)
- [License](#license)
@@ -41,8 +38,7 @@ This is a full-stack Conversational AI. It runs on Cloudflare or Bun.
1. Run `bun run deploy && bun run deploy:secrets && bun run deploy`
> Note: Subsequent deployments should omit `bun run deploy:secrets`
## Local Inference
> Local inference is supported for Ollama and mlx-omni-server. OpenAI compatible servers can be used by overriding OPENAI_API_KEY and OPENAI_API_ENDPOINT.

View File

@@ -1,5 +1,5 @@
import Server from "@open-gsio/server";
import ServerCoordinator from "@open-gsio/server/ServerCoordinator";
import ServerCoordinator from "packages/server/durable-objects/ServerCoordinator";
export {ServerCoordinator}

View File

@@ -4,7 +4,7 @@ interface Env {
EMAIL_SERVICE: any;
// Durable Objects
SERVER_COORDINATOR: import("packages/server/ServerCoordinator.ts");
SERVER_COORDINATOR: import("packages/server/durable-objects/ServerCoordinator.ts");
// Handles serving static assets
ASSETS: Fetcher;

20
packages/server/README.md Normal file
View File

@@ -0,0 +1,20 @@
# @open-gsio/server
This directory contains the server component of open-gsio, a full-stack Conversational AI application. The server handles API requests, manages AI model interactions, serves static assets, and provides server-side rendering capabilities.
## Directory Structure
- `__tests__/`: Contains test files for the server components
- `services/`: Contains service modules for different functionalities
- `AssetService.ts`: Handles static assets and SSR
- `ChatService.ts`: Manages chat interactions with AI models
- `ContactService.ts`: Processes contact form submissions
- `FeedbackService.ts`: Handles user feedback
- `MetricsService.ts`: Collects and processes metrics
- `TransactionService.ts`: Manages transactions
- `durable-objects/`: Contains durable object implementations
- `ServerCoordinator.ts`: Cloudflare Implementation
- `ServerCoordinatorBun.ts`: Bun Implementation
- `api-router.ts`: API Router
- `RequestContext.ts`: Application Context
- `server.ts`: Main server entry point

View File

@@ -1,5 +1,5 @@
import { DurableObject } from "cloudflare:workers";
import {ProviderRepository} from "./providers/_ProviderRepository";
import {ProviderRepository} from "../providers/_ProviderRepository";
export default class ServerCoordinator extends DurableObject {
env;

View File

@@ -1,4 +1,4 @@
import {BunSqliteKVNamespace} from "./storage/BunSqliteKVNamespace";
import {BunSqliteKVNamespace} from "../storage/BunSqliteKVNamespace";
class BunDurableObject {

View File

@@ -2,9 +2,10 @@ import {BunSqliteKVNamespace} from "./storage/BunSqliteKVNamespace";
import {readdir} from 'node:fs/promises';
import type { RequestLike } from "itty-router";
import {config} from "dotenv";
import ServerCoordinator from "./durable-objects/ServerCoordinatorBun";
import Server from ".";
import DurableObjectLocal from "./ServerCoordinatorBun";
import {config} from "dotenv";
const router = Server.Router();
@@ -21,7 +22,7 @@ export default {
fetch: async (request: RequestLike, env: { [key: string]: any; }, ctx: any) =>{
// console.log("[trace] request: ", request.method, request.url, "headers: ", request.headers.get("referer"), "body: ", request.body, "env: ", env, "ctx: ", ctx, "")
env["SERVER_COORDINATOR"] = DurableObjectLocal;
env["SERVER_COORDINATOR"] = ServerCoordinator;
env["ASSETS"] = assetHandler.ASSETS;
env["EVENTSOURCE_HOST"] = process.env.EVENTSOURCE_HOST;
env["GROQ_API_KEY"] = process.env.GROQ_API_KEY;
@@ -57,7 +58,7 @@ export default {
}
}
const assetHandler = {
export const assetHandler = {
ASSETS: {
/**
* Fetches the requested static asset from local dist