change semantics

Update README deployment steps and add deploy:secrets script to package.json

update local inference script and README

update lockfile

reconfigure package scripts for development

update test execution

pass server tests

Update README with revised Bun commands and workspace details

remove pnpm package manager designator

create bun server
This commit is contained in:
geoffsee
2025-06-02 18:41:16 -04:00
committed by Geoff Seemueller
parent 1055cda2f1
commit 497eb22ad8
218 changed files with 1273 additions and 4987 deletions

34
packages/scripts/.gitignore vendored Normal file
View File

@@ -0,0 +1,34 @@
# dependencies (bun install)
node_modules
# output
out
dist
*.tgz
# code coverage
coverage
*.lcov
# logs
logs
*.log
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# caches
.eslintcache
.cache
*.tsbuildinfo
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View File

@@ -0,0 +1,15 @@
# scripts
To install dependencies:
```bash
bun install
```
To run:
```bash
bun run
```
This project was created using `bun init` in bun v1.2.8. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

24
packages/scripts/cleanup.sh Executable file
View File

@@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Remove generated artifacts from every package in the repository:
# persisted data, installed dependencies, tool caches, build output,
# and coverage reports.
echo "Cleaning up build artifacts and temporary directories..."

# Purge each directory name recursively, in the same order as before:
#   .open-gsio    persisted data
#   node_modules  installed dependencies
#   .wrangler     wrangler cache
#   dist, build   build output
#   coverage, html  coverage reports
for dir in .open-gsio node_modules .wrangler dist build coverage html; do
  # -prune stops find from descending into a matched directory before rm removes it
  find . -name "$dir" -type d -prune -exec rm -rf {} \;
done

echo "Cleanup complete!"

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
# Detect a locally running inference service (Ollama or mlx-omni-server) and
# point both the wrangler config (.dev.vars) and the Bun server config (.env)
# at its OpenAI-compatible endpoint.
# Set REPO_ROOT to the directory containing this script, then go up two levels to repo root
REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
WRANGLER_SERVER_PATH=packages/cloudflare-workers/open-gsio
BUN_SERVER_PATH=packages/server
# Set path to .dev.vars file in server directory
DEV_VARS_PATH="${REPO_ROOT}/${WRANGLER_SERVER_PATH}/.dev.vars"
ENV_LOCAL_PATH="${REPO_ROOT}/${BUN_SERVER_PATH}/.env"
# Rewrite OPENAI_API_KEY / OPENAI_API_ENDPOINT in both env files so they point
# at the endpoint passed as $1. Existing lines for those keys are removed first
# so repeated runs do not accumulate duplicates.
# NOTE(review): `sed -i ''` is the BSD/macOS form of in-place editing; GNU sed
# expects `sed -i` with no argument — confirm this script only targets macOS.
# NOTE(review): both files must already exist, otherwise sed errors out.
configure_dev_vars() {
local endpoint_url=$1
# Local inference servers ignore the key, but OpenAI clients require one to be set
local api_key="required-but-not-used"
echo "Configuring .dev.vars for endpoint: ${endpoint_url}"
# Configure OPENAI_API_KEY
# 1. Remove any existing OPENAI_API_KEY line from both files
sed -i '' '/^OPENAI_API_KEY=/d' "${DEV_VARS_PATH}"
sed -i '' '/^OPENAI_API_KEY=/d' "${ENV_LOCAL_PATH}"
# 2. Append the new OPENAI_API_KEY line to both files
echo "OPENAI_API_KEY=${api_key}" >> "${DEV_VARS_PATH}"
echo "OPENAI_API_KEY=${api_key}" >> "${ENV_LOCAL_PATH}"
# Configure OPENAI_API_ENDPOINT
# 1. Remove any existing OPENAI_API_ENDPOINT line from both files
sed -i '' '/^OPENAI_API_ENDPOINT=/d' "${DEV_VARS_PATH}"
sed -i '' '/^OPENAI_API_ENDPOINT=/d' "${ENV_LOCAL_PATH}"
# 2. Append the new OPENAI_API_ENDPOINT line to both files
echo "OPENAI_API_ENDPOINT=${endpoint_url}" >> "${DEV_VARS_PATH}"
echo "OPENAI_API_ENDPOINT=${endpoint_url}" >> "${ENV_LOCAL_PATH}"
echo "Local inference is configured for $endpoint_url"
}
echo "Checking for local inference services..."
# Check for Ollama on port 11434
# nc -z -w1 localhost 11434:
#   -z: Zero-I/O mode (port scanning — just test that the port accepts connections)
#   -w1: Timeout after 1 second
#   >/dev/null 2>&1: Suppress output from nc
if nc -z -w1 localhost 11434 >/dev/null 2>&1; then
echo "Ollama service detected on port 11434."
configure_dev_vars "http://localhost:11434"
# Else, check for mlx-omni-server on port 10240
elif nc -z -w1 localhost 10240 >/dev/null 2>&1; then
echo "mlx-omni-server service detected on port 10240."
configure_dev_vars "http://localhost:10240"
else
# Neither default port answered: leave both env files untouched.
echo "No active local inference service (Ollama or mlx-omni-server) found on default ports (11434, 10240)."
echo "If a service is running on a different port, .dev.vars may need manual configuration."
echo ".dev.vars was not modified by this script for OpenAI local inference settings."
fi
echo "Script finished."

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env node
// CLI utility: free a TCP port by force-killing whatever is bound to it.
// Usage: kill-port <port>
import * as child_process from "node:child_process";

const args = process.argv.slice(2);
const port = args.length > 0 ? parseInt(args[0], 10) : null;

if (!port || Number.isNaN(port)) {
  console.error('Please provide a valid port number');
  process.exit(1);
}

/**
 * Kill (SIGKILL) every process listening on the given TCP port.
 *
 * Resolves when no process is bound to the port, or when all bound
 * processes have been killed. Rejects if `lsof` fails for a reason other
 * than "no matches" (exit code 1), or if the kill command fails.
 *
 * @param {number} port - TCP port to free up.
 * @returns {Promise<void>}
 */
export const killProcessOnPort = (port) => {
  return new Promise((resolve, reject) => {
    // `lsof -t` prints bare PIDs, one per line when multiple processes share the port.
    child_process.exec(`lsof -t -i :${port}`, (err, stdout) => {
      if (err) {
        // lsof exits with code 1 when no process matches; any other code
        // (e.g. permission denied) is a real error.
        if (err.code !== 1) {
          console.error(`Error finding process on port ${port}:`, err);
          return reject(err);
        }
        console.log(`No process found on port ${port}`);
        return resolve();
      }
      // FIX: the original interpolated the raw multi-line lsof output into a
      // single `kill -9 ${pid}` string; an embedded newline acts as a shell
      // command separator, so only the first PID was killed and the rest were
      // executed as (bogus) commands. Split the PIDs and pass them all as
      // space-separated arguments to one kill invocation.
      const pids = stdout.trim().split(/\s+/).filter(Boolean);
      if (pids.length === 0) {
        console.log(`No process is currently running on port ${port}`);
        return resolve();
      }
      child_process.exec(`kill -9 ${pids.join(' ')}`, (killErr) => {
        if (killErr) {
          console.error(`Failed to kill process ${pids.join(', ')} on port ${port}`, killErr);
          return reject(killErr);
        }
        console.log(`Successfully killed process ${pids.join(', ')} on port ${port}`);
        resolve();
      });
    });
  });
};

await killProcessOnPort(port);

View File

@@ -0,0 +1,10 @@
{
"name": "@open-gsio/scripts",
"private": true,
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5"
}
}

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Launch a local inference backend.
# Usage: start_inference_server.sh (mlx-omni-server|ollama)
if [ "$1" = "mlx-omni-server" ]; then
  printf "Starting Inference Server: %s\n" "$1"
  mlx-omni-server --log-level debug
elif [ "$1" = "ollama" ]; then
  echo "starting ollama"
  # Run Ollama in Docker: persist models in the "ollama" volume and expose
  # its API on the default port 11434.
  docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
else
  # FIX: the original error message only mentioned 'mlx-omni-server' even
  # though 'ollama' is also a valid argument.
  printf "Error: First argument must be 'mlx-omni-server' or 'ollama'\n"
  exit 1
fi

View File

@@ -0,0 +1,28 @@
{
"compilerOptions": {
// Environment setup & latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env bash
# Trigger the "Update VPN Blocklist" GitHub Actions workflow via the GitHub CLI.
# Requires an authenticated `gh` session with workflow-dispatch permission.
gh workflow run "Update VPN Blocklist"