Fix formatting errors (cargo fmt: import ordering and line wrapping)

This commit is contained in:
geoffsee
2025-08-31 19:59:09 -04:00
parent 8d2b85b0b9
commit d1a7d5b28e
5 changed files with 34 additions and 43 deletions

14
Cargo.lock generated
View File

@@ -893,7 +893,7 @@ checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "cli"
version = "0.1.2"
version = "0.1.3"
[[package]]
name = "codee"
@@ -1471,7 +1471,7 @@ dependencies = [
[[package]]
name = "embeddings-engine"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"async-openai",
"axum",
@@ -2093,7 +2093,7 @@ dependencies = [
[[package]]
name = "gemma-runner"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"anyhow",
"candle-core 0.9.1 (git+https://github.com/huggingface/candle.git)",
@@ -2274,7 +2274,7 @@ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "helm-chart-tool"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"anyhow",
"clap",
@@ -2684,7 +2684,7 @@ dependencies = [
[[package]]
name = "inference-engine"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"ab_glyph",
"anyhow",
@@ -3177,7 +3177,7 @@ checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
[[package]]
name = "llama-runner"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"anyhow",
"candle-core 0.9.1 (git+https://github.com/huggingface/candle.git)",
@@ -4056,7 +4056,7 @@ dependencies = [
[[package]]
name = "predict-otron-9000"
version = "0.1.2"
version = "0.1.3"
dependencies = [
"axum",
"chat-ui",

View File

@@ -29,7 +29,7 @@ cd crates/chat-ui
This starts the development server on port 8788 with auto-reload capabilities.
### Usage
1. Start the predict-otron-9000 server: `./scripts/run_server.sh`
1. Start the predict-otron-9000 server: `./scripts/run.sh`
2. Start the chat-ui: `cd crates/chat-ui && ./run.sh`
3. Navigate to `http://localhost:8788`
4. Start chatting with your AI models!

View File

@@ -12,7 +12,6 @@ pub struct AppConfig {
impl Default for AppConfig {
fn default() -> Self {
let conf = get_configuration(Some(concat!(env!("CARGO_MANIFEST_DIR"), "/Cargo.toml")))
.expect("failed to read config");
@@ -41,6 +40,7 @@ pub fn create_router(leptos_options: LeptosOptions) -> Router {
.with_state(leptos_options)
}
use gloo_net::http::Request;
use leptos::prelude::*;
use leptos_meta::{provide_meta_context, MetaTags, Stylesheet, Title};
use leptos_router::{
@@ -48,7 +48,6 @@ use leptos_router::{
StaticSegment,
};
use serde::{Deserialize, Serialize};
use gloo_net::http::Request;
use web_sys::console;
// Remove spawn_local import as we'll use different approach
@@ -122,7 +121,10 @@ pub async fn fetch_models() -> Result<Vec<ModelInfo>, String> {
}
// API client function to send chat completion requests
pub async fn send_chat_completion(messages: Vec<ChatMessage>, model: String) -> Result<String, String> {
pub async fn send_chat_completion(
messages: Vec<ChatMessage>,
model: String,
) -> Result<String, String> {
let request = ChatRequest {
model,
messages,

View File

@@ -1,14 +1,11 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
use axum::Router;
use chat_ui::app::*;
use leptos::logging::log;
use leptos::prelude::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use chat_ui::app::*;
let conf = get_configuration(None).expect("failed to read config");
let addr = conf.leptos_options.site_addr;

View File

@@ -4,27 +4,27 @@ mod middleware;
mod standalone_mode;
use crate::standalone_mode::create_standalone_router;
use axum::handler::Handler;
use axum::http::StatusCode as AxumStatusCode;
use axum::http::header;
use axum::response::IntoResponse;
use axum::routing::get;
use axum::{Router, http::Uri, response::Html, serve, ServiceExt};
use axum::{Router, ServiceExt, http::Uri, response::Html, serve};
use config::ServerConfig;
use ha_mode::create_ha_router;
use inference_engine::AppState;
use log::info;
use middleware::{MetricsLayer, MetricsLoggerFuture, MetricsStore};
use mime_guess::from_path;
use rust_embed::Embed;
use std::env;
use std::path::Component::ParentDir;
use axum::handler::Handler;
use axum::http::header;
use mime_guess::from_path;
use tokio::net::TcpListener;
use tower::MakeService;
use tower_http::classify::ServerErrorsFailureClass::StatusCode;
use tower_http::cors::{Any, CorsLayer};
use tower_http::trace::TraceLayer;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
use axum::http::{StatusCode as AxumStatusCode };
use log::info;
#[derive(Embed)]
#[folder = "../../target/site"]
@@ -34,7 +34,6 @@ use log::info;
#[include = "*.ico"]
struct Asset;
async fn static_handler(uri: Uri) -> axum::response::Response {
// Strip the leading `/`
let path = uri.path().trim_start_matches('/');
@@ -49,18 +48,12 @@ async fn static_handler(uri: Uri) -> axum::response::Response {
let body = content.data.into_owned();
let mime = from_path(path).first_or_octet_stream();
(
[(header::CONTENT_TYPE, mime.as_ref())],
body,
)
.into_response()
([(header::CONTENT_TYPE, mime.as_ref())], body).into_response()
}
None => (AxumStatusCode::NOT_FOUND, "404 Not Found").into_response(),
}
}
#[tokio::main]
async fn main() {
// Initialize tracing
@@ -123,7 +116,6 @@ async fn main() {
// Create the leptos router for the web frontend
let leptos_router = chat_ui::app::create_router(leptos_config.config.leptos_options);
// Merge the service router with base routes and add middleware layers
let app = Router::new()
.route("/pkg/{*path}", get(static_handler))