update project structure

This commit is contained in:
geoffsee
2025-06-05 22:42:17 -04:00
parent 1270a6b0ba
commit c5b8bd812c
45 changed files with 4921 additions and 128 deletions

Cargo.lock (generated): 4880 changes
File diff suppressed because it is too large

Cargo.toml

@@ -1,36 +1,7 @@
-[package]
-name = "open-web-agent-rs"
-version = "0.1.0"
-edition = "2021"
-license = "MIT"
-
-[[bin]]
-edition = "2021"
-name = "agent-server"
-path = "src/main.rs"
-
-[dependencies]
-axum = { version = "0.8", features = ["multipart"] }
-serde = { version = "1.0", features = ["derive"] }
-tokio = { version = "1.0", features = ["full"] }
-tracing = "0.1"
-tracing-subscriber = { version = "0.3", features = ["env-filter"] }
-http = "1.1.0"
-tokio-stream = "0.1.16"
-uuid = { version = "1.11.0", features = ["v4"] }
-tokio-util = { version = "0.7", features = ["io"] }
-serde_json = "1.0.133"
-futures = "0.3.31"
-dotenv = "0.15.0"
-shell-escape = "0.1.5"
-rust-embed = "8.5.0"
-bytes = "1.8.0"
-lazy_static = "1.5.0"
-sled = "0.34.7"
-tower-http = { version = "0.6.2", features = ["trace", "cors"] }
-tower = "0.5.2"
-anyhow = "1.0.97"
-base64 = "0.22.1"
-fips204 = "0.4.6"
-rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = ["server", "transport-streamable-http-server", "transport-sse-server", "transport-io",] }
-mime_guess = "2.0.5"
+[workspace]
+members = [
+    "crates/agent-server",
+    "crates/local_inference_engine",
+]
+
+resolver = "2"
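With the root manifest reduced to a workspace definition, a quick sanity check that both members resolve; a minimal sketch, assuming `jq` is available:

```bash
# List the members defined by the new workspace manifest (requires jq)
cargo metadata --format-version 1 --no-deps | jq -r '.workspace_members[]'
```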

Dockerfile (aarch64 target)

@@ -14,11 +14,11 @@ RUN rustup target add aarch64-unknown-linux-musl
# Copy only necessary files for building
COPY Cargo.toml Cargo.lock ./
-COPY src ./src
+COPY crates ./crates
COPY assets ./assets
# Build with musl target for static linking
-RUN cargo build --release --target aarch64-unknown-linux-musl && \
+RUN cargo build -p agent-server --release --target aarch64-unknown-linux-musl && \
strip /build-context/target/aarch64-unknown-linux-musl/release/agent-server
# Stage 2: Build Bun dependencies
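The image's build step can be reproduced outside Docker; a sketch, assuming the musl target is installed on the host:

```bash
# Mirror the Dockerfile's build step on a local machine
rustup target add aarch64-unknown-linux-musl
cargo build -p agent-server --release --target aarch64-unknown-linux-musl
```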

README.md

@@ -1,12 +1,69 @@
# open-web-agent-rs
-A Rust-based web agent with an embedded openai compatible inference server (supports gemma models only).
+A Rust-based web agent with an embedded OpenAI-compatible inference server (supports Gemma models only).
+
+## Project Structure
+
+This project is organized as a Cargo workspace with the following crates:
+
+- `agent-server`: The main web agent server
+- `local_inference_engine`: An embedded OpenAI-compatible inference server for Gemma models
+
+## Setup
+
+1. Clone the repository
+2. Copy the example environment file:
+   ```bash
+   cp .env.example .env
+   ```
+3. Install JavaScript dependencies:
+   ```bash
+   bun i
+   ```
+4. Start the SearXNG search engine:
+   ```bash
+   docker compose up -d searxng
+   ```
+
+## Running the Project
+
+### Local Inference Engine
+
+To run the local inference engine:
+
+```bash
+cd crates/local_inference_engine
+cargo run --release -- --server
+```
+
+### Agent Server
+
+To run the agent server:
+
+```bash
+cargo run -p agent-server
+```
+
+### Development Mode
+
+For development with automatic reloading, use `bun dev` (see the Quickstart below).
## Quickstart
```bash
cp .env.example .env
bun i
+(cd crates/local_inference_engine && cargo run --release -- --server)
docker compose up -d searxng
bun dev
```
+
+## Building
+
+To build all crates in the workspace:
+
+```bash
+cargo build
+```
+
+To build a specific crate:
+
+```bash
+cargo build -p agent-server
+# or
+cargo build -p local_inference_engine
+```
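For reference, `bun dev` in the Quickstart expands to the `dev` script from `package.json`, which is updated later in this commit:

```bash
# Equivalent of `bun dev` under the new layout
bun i && ./killport.js 3006 && bun run build && cargo watch -x 'run -p agent-server'
```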

Dockerfile (x86_64 target)

@@ -14,11 +14,11 @@ RUN rustup target add x86_64-unknown-linux-musl
# Copy only necessary files for building
COPY Cargo.toml Cargo.lock ./
-COPY src ./src
+COPY crates ./crates
COPY assets ./assets
# Build with musl target for static linking
-RUN cargo build --release --target x86_64-unknown-linux-musl && \
+RUN cargo build -p agent-server --release --target x86_64-unknown-linux-musl && \
strip /build-context/target/x86_64-unknown-linux-musl/release/agent-server
# Stage 2: Build Bun dependencies
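Either Dockerfile should produce a fully static binary thanks to the musl target; one way to verify, assuming the standard Cargo target directory:

```bash
# A statically linked binary reports "statically linked" here
file target/x86_64-unknown-linux-musl/release/agent-server
```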

crates/agent-server/Cargo.toml

@@ -0,0 +1,36 @@
+[package]
+name = "agent-server"
+version = "0.1.0"
+edition = "2021"
+license = "MIT"
+
+[[bin]]
+edition = "2021"
+name = "agent-server"
+path = "src/main.rs"
+
+[dependencies]
+axum = { version = "0.8", features = ["multipart"] }
+serde = { version = "1.0", features = ["derive"] }
+tokio = { version = "1.0", features = ["full"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+http = "1.1.0"
+tokio-stream = "0.1.16"
+uuid = { version = "1.11.0", features = ["v4"] }
+tokio-util = { version = "0.7", features = ["io"] }
+serde_json = "1.0.133"
+futures = "0.3.31"
+dotenv = "0.15.0"
+shell-escape = "0.1.5"
+rust-embed = "8.5.0"
+bytes = "1.8.0"
+lazy_static = "1.5.0"
+sled = "0.34.7"
+tower-http = { version = "0.6.2", features = ["trace", "cors"] }
+tower = "0.5.2"
+anyhow = "1.0.97"
+base64 = "0.22.1"
+fips204 = "0.4.6"
+rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = ["server", "transport-streamable-http-server", "transport-sse-server", "transport-io",] }
+mime_guess = "2.0.5"
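Because `rmcp` comes from a git branch rather than crates.io, it is worth confirming the dependency still resolves from the relocated crate:

```bash
# Show agent-server's direct dependencies, including the git-sourced rmcp
cargo tree -p agent-server --depth 1
```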

crates/agent-server/src/… (embedded static assets)

@@ -13,7 +13,7 @@ use crate::agents::Agents;
#[derive(Embed)]
#[folder = "./node_modules/@modelcontextprotocol/inspector-client/dist"]
#[folder = "../../node_modules/@modelcontextprotocol/inspector-client/dist"]
struct Asset;
pub struct StaticFile<T>(pub T);
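The `#[folder]` path changes because rust-embed resolves relative paths against the crate's own `CARGO_MANIFEST_DIR`; with the crate now in `crates/agent-server`, the workspace-level `node_modules` sits two directories up. A quick check from the repo root:

```bash
# The embedded asset directory, as seen from the relocated crate
ls crates/agent-server/../../node_modules/@modelcontextprotocol/inspector-client/dist
```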

crates/local_inference_engine/README.md

@@ -30,8 +30,7 @@ A Rust-based inference engine for running large language models locally. This to
2. Build the local inference engine:
```bash
-cd local_inference_engine
-cargo build --release
+cargo build -p local_inference_engine --release
```
## Usage
@@ -41,7 +40,7 @@ A Rust-based inference engine for running large language models locally. This to
Run the inference engine in CLI mode to generate text directly:
```bash
-cargo run --release -- --prompt "Your prompt text here" --which 3-1b-it
+cargo run -p local_inference_engine --release -- --prompt "Your prompt text here" --which 3-1b-it
```
#### CLI Options
@@ -63,7 +62,7 @@ cargo run --release -- --prompt "Your prompt text here" --which 3-1b-it
Run the inference engine in server mode to expose an OpenAI-compatible API:
```bash
-cargo run --release -- --server --port 3777 --which 3-1b-it
+cargo run -p local_inference_engine --release -- --server --port 3777 --which 3-1b-it
```
This starts a web server on the specified port (default: 3777) with an OpenAI-compatible chat completions endpoint.
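A smoke test for the server; the `/v1/chat/completions` route and the model id below are assumptions based on typical OpenAI-compatible APIs, not confirmed by this diff:

```bash
# Assumed route and model id; adjust to what the server actually exposes
curl http://localhost:3777/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "gemma-3-1b-it", "messages": [{"role": "user", "content": "Hello"}]}'
```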
@@ -204,4 +203,4 @@ main();
## License
-This project is licensed under the terms specified in the LICENSE file.
\ No newline at end of file
+This project is licensed under the terms specified in the LICENSE file.

package.json

@@ -7,7 +7,7 @@
"private": true,
"scripts": {
"clean": "rm -rf .genaiscript && rm -rf dist && rm -rf node_modules && rm -rf open-web-agent-rs && rm -rf target && rm -rf packages/genaiscript-rust-shim/dist",
"dev": "bun i && ./killport.js 3006 && bun run build && cargo watch -x 'run src/main.rs'",
"dev": "bun i && ./killport.js 3006 && bun run build && cargo watch -x 'run -p agent-server'",
"start": "docker compose up --build",
"ai:search": "genaiscript run packages/genaiscript/genaisrc/web-search.genai.mts --vars USER_INPUT='who won the 2024 election?'",
"shim:ai:search": "pnpm build && ./packages/genaiscript-rust-shim/dist/genaiscript-rust-shim.js --file=packages/genaiscript/genaisrc/web-search.genai.mts USER_INPUT=\"Who won the 2024 presidential election?\"\n",
@@ -17,7 +17,7 @@
"prod:logs": "fly logs",
"test-http": "test/test-search.ts",
"mcp-inspector": "bunx @modelcontextprotocol/inspector",
"build": "(cd packages/genaiscript-rust-shim && bun run buildShim && bun run setupDev && cargo build)"
"build": "(cd packages/genaiscript-rust-shim && bun run buildShim && bun run setupDev && cargo build -p agent-server)"
},
"dependencies": {
"@modelcontextprotocol/inspector": "^0.14.0"