update project structure
Cargo.lock (generated, 4880 changes)
File diff suppressed because it is too large
Cargo.toml (41 changes)

@@ -1,36 +1,7 @@
-[package]
-name = "open-web-agent-rs"
-version = "0.1.0"
-edition = "2021"
-license = "MIT"
-
-[[bin]]
-edition = "2021"
-name = "agent-server"
-path = "src/main.rs"
-
-[dependencies]
-axum = { version = "0.8", features = ["multipart"] }
-serde = { version = "1.0", features = ["derive"] }
-tokio = { version = "1.0", features = ["full"] }
-tracing = "0.1"
-tracing-subscriber = { version = "0.3", features = ["env-filter"] }
-http = "1.1.0"
-tokio-stream = "0.1.16"
-uuid = { version = "1.11.0", features = ["v4"] }
-tokio-util = { version = "0.7", features = ["io"] }
-serde_json = "1.0.133"
-futures = "0.3.31"
-dotenv = "0.15.0"
-shell-escape = "0.1.5"
-rust-embed = "8.5.0"
-bytes = "1.8.0"
-lazy_static = "1.5.0"
-sled = "0.34.7"
-tower-http = { version = "0.6.2", features = ["trace", "cors"] }
-tower = "0.5.2"
-anyhow = "1.0.97"
-base64 = "0.22.1"
-fips204 = "0.4.6"
-rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = ["server", "transport-streamable-http-server", "transport-sse-server", "transport-io",] }
-mime_guess = "2.0.5"
+[workspace]
+members = [
+    "crates/agent-server",
+    "crates/local_inference_engine",
+]
+resolver = "2"
@@ -14,11 +14,11 @@ RUN rustup target add aarch64-unknown-linux-musl
 
 # Copy only necessary files for building
 COPY Cargo.toml Cargo.lock ./
-COPY src ./src
+COPY crates ./crates
 COPY assets ./assets
 
 # Build with musl target for static linking
-RUN cargo build --release --target aarch64-unknown-linux-musl && \
+RUN cargo build -p agent-server --release --target aarch64-unknown-linux-musl && \
     strip /build-context/target/aarch64-unknown-linux-musl/release/agent-server
 
 # Stage 2: Build Bun dependencies
README.md (69 changes)

@@ -1,12 +1,69 @@
 # open-web-agent-rs
 
-A Rust-based web agent with an embedded openai compatible inference server (supports gemma models only).
+A Rust-based web agent with an embedded OpenAI-compatible inference server (supports Gemma models only).
 
-## Quickstart
+## Project Structure
+
+This project is organized as a Cargo workspace with the following crates:
+
+- `agent-server`: The main web agent server
+- `local_inference_engine`: An embedded OpenAI-compatible inference server for Gemma models
+
+## Setup
+
+1. Clone the repository
+2. Copy the example environment file:
+   ```bash
+   cp .env.example .env
+   ```
+3. Install JavaScript dependencies:
+   ```bash
+   bun i
+   ```
+4. Start the SearXNG search engine:
+   ```bash
+   docker compose up -d searxng
+   ```
+
+## Running the Project
+
+### Local Inference Engine
+
+To run the local inference engine:
+
+```bash
+cd crates/local_inference_engine
+cargo run --release -- --server
+```
+
+### Agent Server
+
+To run the agent server:
+
+```bash
+cargo run -p agent-server
+```
+
+### Development Mode
+
+For development with automatic reloading:
+
 ```bash
-cp .env.example .env
-bun i
-(cd local_inference_server && cargo run --release -- --server)
-docker compose up -d searxng
 bun dev
 ```
+
+## Building
+
+To build all crates in the workspace:
+
+```bash
+cargo build
+```
+
+To build a specific crate:
+
+```bash
+cargo build -p agent-server
+# or
+cargo build -p local_inference_engine
+```
@@ -14,11 +14,11 @@ RUN rustup target add x86_64-unknown-linux-musl
 
 # Copy only necessary files for building
 COPY Cargo.toml Cargo.lock ./
-COPY src ./src
+COPY crates ./crates
 COPY assets ./assets
 
 # Build with musl target for static linking
-RUN cargo build --release --target x86_64-unknown-linux-musl && \
+RUN cargo build -p agent-server --release --target x86_64-unknown-linux-musl && \
     strip /build-context/target/x86_64-unknown-linux-musl/release/agent-server
 
 # Stage 2: Build Bun dependencies
crates/agent-server/Cargo.toml (new file, 36 lines)

@@ -0,0 +1,36 @@
+[package]
+name = "agent-server"
+version = "0.1.0"
+edition = "2021"
+license = "MIT"
+
+[[bin]]
+edition = "2021"
+name = "agent-server"
+path = "src/main.rs"
+
+[dependencies]
+axum = { version = "0.8", features = ["multipart"] }
+serde = { version = "1.0", features = ["derive"] }
+tokio = { version = "1.0", features = ["full"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
+http = "1.1.0"
+tokio-stream = "0.1.16"
+uuid = { version = "1.11.0", features = ["v4"] }
+tokio-util = { version = "0.7", features = ["io"] }
+serde_json = "1.0.133"
+futures = "0.3.31"
+dotenv = "0.15.0"
+shell-escape = "0.1.5"
+rust-embed = "8.5.0"
+bytes = "1.8.0"
+lazy_static = "1.5.0"
+sled = "0.34.7"
+tower-http = { version = "0.6.2", features = ["trace", "cors"] }
+tower = "0.5.2"
+anyhow = "1.0.97"
+base64 = "0.22.1"
+fips204 = "0.4.6"
+rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "main", features = ["server", "transport-streamable-http-server", "transport-sse-server", "transport-io",] }
+mime_guess = "2.0.5"
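The `[[bin]]` section points at `src/main.rs` inside the relocated crate. That file is not part of this diff; purely as an illustration of how the declared dependencies fit together, a minimal axum 0.8 entry point could look like the sketch below. The route and the port are assumptions (3006 appears only in the repo's `killport.js` dev script), not the crate's actual code.

```rust
use axum::{routing::get, Router};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // env-filter lets RUST_LOG control verbosity, matching the
    // tracing-subscriber features declared in Cargo.toml above.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();

    // Hypothetical route; the real router is defined by the repo's code.
    let app = Router::new().route("/health", get(|| async { "ok" }));

    // Port 3006 is a guess based on the killport.js dev script.
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3006").await?;
    axum::serve(listener, app).await?;
    Ok(())
}
```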
@@ -13,7 +13,7 @@ use crate::agents::Agents;
 
 #[derive(Embed)]
-#[folder = "./node_modules/@modelcontextprotocol/inspector-client/dist"]
+#[folder = "../../node_modules/@modelcontextprotocol/inspector-client/dist"]
 struct Asset;
 
 pub struct StaticFile<T>(pub T);
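For context on the hunk above: rust-embed resolves the `#[folder]` path relative to the crate's `Cargo.toml`, which is why moving the crate into `crates/agent-server` requires the `../../node_modules/...` prefix. The repo's `StaticFile` wrapper body is not shown in this diff; the following is a minimal sketch of how such a wrapper is typically wired up with rust-embed, axum, and mime_guess (all in the dependency list above), following the pattern from rust-embed's documentation rather than this repo's actual implementation.

```rust
use axum::http::{header, StatusCode};
use axum::response::{IntoResponse, Response};
use rust_embed::Embed;

#[derive(Embed)]
#[folder = "../../node_modules/@modelcontextprotocol/inspector-client/dist"]
struct Asset;

pub struct StaticFile<T>(pub T);

// Hypothetical IntoResponse impl: look the path up in the embedded
// folder and serve it with a guessed Content-Type, or 404 if absent.
impl<T: Into<String>> IntoResponse for StaticFile<T> {
    fn into_response(self) -> Response {
        let path = self.0.into();
        match Asset::get(path.as_str()) {
            Some(file) => {
                let mime = mime_guess::from_path(&path).first_or_octet_stream();
                ([(header::CONTENT_TYPE, mime.as_ref())], file.data).into_response()
            }
            None => (StatusCode::NOT_FOUND, "404 Not Found").into_response(),
        }
    }
}
```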
@@ -30,8 +30,7 @@ A Rust-based inference engine for running large language models locally. This to
 
 2. Build the local inference engine:
 ```bash
-cd local_inference_engine
-cargo build --release
+cargo build -p local_inference_engine --release
 ```
 
 ## Usage
@@ -41,7 +40,7 @@ A Rust-based inference engine for running large language models locally. This to
 Run the inference engine in CLI mode to generate text directly:
 
 ```bash
-cargo run --release -- --prompt "Your prompt text here" --which 3-1b-it
+cargo run -p local_inference_engine --release -- --prompt "Your prompt text here" --which 3-1b-it
 ```
 
 #### CLI Options
@@ -63,7 +62,7 @@ cargo run --release -- --prompt "Your prompt text here" --which 3-1b-it
 Run the inference engine in server mode to expose an OpenAI-compatible API:
 
 ```bash
-cargo run --release -- --server --port 3777 --which 3-1b-it
+cargo run -p local_inference_engine --release -- --server --port 3777 --which 3-1b-it
 ```
 
 This starts a web server on the specified port (default: 3777) with an OpenAI-compatible chat completions endpoint.
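Since the server exposes an OpenAI-compatible chat completions endpoint, any OpenAI-style client should be able to talk to it. A minimal client sketch in Rust follows; note that the `/v1/chat/completions` path and the model id are assumptions (the diff only says "chat completions endpoint"), and `reqwest` is not among this repo's dependencies.

```rust
use serde_json::json;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Assumed route: OpenAI-compatible servers conventionally use
    // /v1/chat/completions. Requires reqwest with the "json" feature.
    let body = json!({
        "model": "gemma-3-1b-it", // hypothetical model id
        "messages": [{ "role": "user", "content": "Say hello." }]
    });
    let resp = reqwest::Client::new()
        .post("http://localhost:3777/v1/chat/completions")
        .json(&body)
        .send()
        .await?
        .text()
        .await?;
    println!("{resp}");
    Ok(())
}
```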
@@ -7,7 +7,7 @@
   "private": true,
   "scripts": {
     "clean": "rm -rf .genaiscript && rm -rf dist && rm -rf node_modules && rm -rf open-web-agent-rs && rm -rf target && rm -rf packages/genaiscript-rust-shim/dist",
-    "dev": "bun i && ./killport.js 3006 && bun run build && cargo watch -x 'run src/main.rs'",
+    "dev": "bun i && ./killport.js 3006 && bun run build && cargo watch -x 'run -p agent-server'",
     "start": "docker compose up --build",
     "ai:search": "genaiscript run packages/genaiscript/genaisrc/web-search.genai.mts --vars USER_INPUT='who won the 2024 election?'",
     "shim:ai:search": "pnpm build && ./packages/genaiscript-rust-shim/dist/genaiscript-rust-shim.js --file=packages/genaiscript/genaisrc/web-search.genai.mts USER_INPUT=\"Who won the 2024 presidential election?\"\n",
@@ -17,7 +17,7 @@
     "prod:logs": "fly logs",
     "test-http": "test/test-search.ts",
     "mcp-inspector": "bunx @modelcontextprotocol/inspector",
-    "build": "(cd packages/genaiscript-rust-shim && bun run buildShim && bun run setupDev && cargo build)"
+    "build": "(cd packages/genaiscript-rust-shim && bun run buildShim && bun run setupDev && cargo build -p agent-server)"
   },
   "dependencies": {
     "@modelcontextprotocol/inspector": "^0.14.0"