Development environment functions
@@ -0,0 +1,38 @@
FROM rust:1-slim-bookworm AS build

WORKDIR /app

# Install build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    clang \
    build-essential \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*

# Add wasm32 target for Cloudflare Workers
RUN rustup target add wasm32-unknown-unknown

# Copy project files
COPY Cargo.toml Cargo.lock ./
COPY wrangler.jsonc ./
COPY src/ ./src/

# Install worker-build and build the project
RUN cargo install -q worker-build && worker-build --release

FROM node:20-slim

WORKDIR /app

# Install wrangler
RUN npm install -g wrangler

# Copy built files from the build stage
COPY --from=build /app/build ./build
COPY --from=build /app/wrangler.jsonc ./

EXPOSE 8787

# node:20-slim ships without curl, so probe with node's built-in fetch instead
HEALTHCHECK CMD node -e "fetch('http://localhost:8787').then(r => process.exit(r.ok ? 0 : 1)).catch(() => process.exit(1))"

ENTRYPOINT ["wrangler", "dev"]
packages/example-apps/example-node-service/.dockerignore (new file, 3 lines)
@@ -0,0 +1,3 @@
/node_modules
/dist
/.wrangler
@@ -0,0 +1,13 @@
FROM node:20-slim

WORKDIR /app

COPY deploy/example-service .

RUN npm install --omit=dev

EXPOSE 8787

# node:20-slim ships without curl, so probe with node's built-in fetch instead
HEALTHCHECK CMD node -e "fetch('http://localhost:8787').then(r => process.exit(r.ok ? 0 : 1)).catch(() => process.exit(1))"

ENTRYPOINT ["npm", "run", "dev"]
packages/example-apps/example-node-service/app.env (new file, 1 line)
@@ -0,0 +1 @@
SOME_USELESS_ENV_VAR_TO_TEST_FUNCTIONALITY=completely-useless-value
packages/example-apps/example-node-service/main.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { parse } from "cookie";

export default {
  async fetch(request): Promise<Response> {
    // The name of the cookie
    const COOKIE_NAME = "session";
    const cookie = parse(request.headers.get("Cookie") || "");

    if (cookie[COOKIE_NAME] != null) {
      // Respond with the cookie value
      return new Response(`
        <html>
          <body>
            <h1>Cookie Status</h1>
            <p>Cookie '${COOKIE_NAME}' exists with value: ${cookie[COOKIE_NAME]}</p>
          </body>
        </html>
      `, {
        headers: {
          "Content-Type": "text/html"
        }
      });
    }
    return new Response(`
      <html>
        <body>
          <h1>Cookie Status</h1>
          <p>No cookie found with name: ${COOKIE_NAME}</p>
        </body>
      </html>
    `, {
      headers: {
        "Content-Type": "text/html"
      }
    });
  },
} satisfies ExportedHandler;
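Worth noting: the worker above only ever reads the `session` cookie; nothing in this commit sets it, so a client has to supply one to reach the first branch. A minimal sketch (hypothetical, not part of this commit) that exercises both branches from a scratch Rust project with `tokio` and `reqwest` as dependencies, assuming `wrangler dev` is serving the worker on http://localhost:8787:

// Hypothetical client exercising both branches of main.ts above.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    // No Cookie header: expect the "No cookie found" page.
    let miss = client.get("http://localhost:8787/").send().await?.text().await?;
    // With a session cookie: expect the "exists with value" page.
    let hit = client
        .get("http://localhost:8787/")
        .header("Cookie", "session=abc123")
        .send().await?
        .text().await?;
    println!("{miss}\n{hit}");
    Ok(())
}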
packages/example-apps/example-node-service/package.json (new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "name": "example-service",
  "type": "module",
  "scripts": {
    "dev": "wrangler dev",
    "build": "wrangler build",
    "deploy": "wrangler deploy"
  },
  "dependencies": {
    "cookie": "^1.0.2",
    "wrangler": "latest"
  }
}
packages/example-apps/example-node-service/tsconfig.json (new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "compilerOptions": {
    "target": "ESNext",
    "module": "ESNext",
    "lib": ["ESNext"],
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "resolveJsonModule": true,
    "isolatedModules": true,
    "noEmit": true
  }
}
@@ -0,0 +1,8 @@
{
  "compatibility_date": "2025-08-07",
  "main": "main.ts",
  "name": "example-service",
  "dev": {
    "ip": "0.0.0.0"
  }
}
@@ -0,0 +1,38 @@
FROM rust:1-slim-bookworm AS build

WORKDIR /app

# Install build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    clang \
    build-essential \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*

# Add wasm32 target for Cloudflare Workers
RUN rustup target add wasm32-unknown-unknown

# Copy project files
COPY Cargo.toml Cargo.lock ./
COPY wrangler.jsonc ./
COPY src/ ./src/

# Install worker-build and build the project
RUN cargo install -q worker-build && worker-build --release

FROM node:20-slim

WORKDIR /app

# Install wrangler
RUN npm install -g wrangler

# Copy built files from the build stage
COPY --from=build /app/build ./build
COPY --from=build /app/wrangler.jsonc ./

EXPOSE 8787

# node:20-slim ships without curl, so probe with node's built-in fetch instead
HEALTHCHECK CMD node -e "fetch('http://localhost:8787').then(r => process.exit(r.ok ? 0 : 1)).catch(() => process.exit(1))"

ENTRYPOINT ["wrangler", "dev"]
packages/localhost-proxy/Cargo.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
[package]
name = "localhost-proxy"
version = "0.1.0"
edition = "2024"
authors = ["Geoff Seemueller <28698553+geoffsee@users.noreply.github.com>"]

[[bin]]
name = "localhost-proxy"
path = "src/main.rs"

[dependencies]
axum = { version = "0.7.9", features = ["macros", "json", "query", "tracing"] }
tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "net"] }
reqwest = { version = "0.11.27", features = ["json", "rustls-tls"], default-features = false }
tower = { version = "0.5.2", features = ["tokio", "tracing"] }
tower-http = { version = "0.6.2", features = ["cors", "trace"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
http = "1.3.1"
bytes = "1.9.0"
thiserror = "1.0"
anyhow = "1.0"
packages/localhost-proxy/README.md (new file, 1 line)
@@ -0,0 +1 @@
This server accepts plain-HTTP requests on localhost and forwards them to an HTTPS development target (trusting its self-signed certificate), which makes local development easier.
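A minimal usage sketch (hypothetical, not part of this commit): with the proxy running via `cargo run`, a plain-HTTP client talks to port 3030 and the proxy re-issues the request against the HTTPS target hardcoded in src/main.rs below. Assumes a scratch project with `tokio` and `reqwest` as dependencies:

// Plain HTTP to the proxy; it forwards to https://machine.127.0.0.1.sslip.io
// and accepts that target's self-signed certificate on our behalf.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let body = reqwest::get("http://127.0.0.1:3030/").await?.text().await?;
    println!("{body}");
    Ok(())
}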
packages/localhost-proxy/src/main.rs (new file, 165 lines)
@@ -0,0 +1,165 @@
use axum::{
    body::Body,
    extract::Request,
    http::StatusCode,
    response::{IntoResponse, Response},
    routing::any,
    Router,
};
use reqwest::Client;
use tower_http::{cors::CorsLayer, trace::TraceLayer};
use tracing::{error, info, instrument};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};

// Hardcoded proxy target URL - change this to your desired target
const PROXY_TARGET: &str = "https://machine.127.0.0.1.sslip.io";

#[derive(Clone)]
struct AppState {
    client: Client,
    target_url: String,
}

#[derive(Debug, thiserror::Error)]
enum ProxyError {
    #[error("Request error: {0}")]
    RequestError(#[from] reqwest::Error),
    #[error("Invalid header value: {0}")]
    InvalidHeaderValue(#[from] http::header::InvalidHeaderValue),
    #[error("Invalid header name: {0}")]
    InvalidHeaderName(#[from] http::header::InvalidHeaderName),
    #[error("URI parse error: {0}")]
    UriError(#[from] http::uri::InvalidUri),
    #[error("HTTP error: {0}")]
    HttpError(#[from] http::Error),
    #[error("Axum error: {0}")]
    AxumError(String),
    #[error("Method conversion error")]
    MethodError,
}

impl IntoResponse for ProxyError {
    fn into_response(self) -> Response {
        let status = StatusCode::BAD_GATEWAY;
        let body = format!("Proxy error: {}", self);
        error!("Proxy error: {}", self);
        (status, body).into_response()
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Initialize tracing (the default filter targets this crate, localhost_proxy)
    tracing_subscriber::registry()
        .with(
            tracing_subscriber::EnvFilter::try_from_default_env()
                .unwrap_or_else(|_| "localhost_proxy=debug,tower_http=debug,axum::rejection=trace".into()),
        )
        .with(tracing_subscriber::fmt::layer())
        .init();

    // Create HTTP client
    let client = Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        .danger_accept_invalid_certs(true) // Accept self-signed certificates
        .build()?;

    let state = AppState {
        client,
        target_url: PROXY_TARGET.to_string(),
    };

    // Create router
    let app = Router::new()
        .route("/*path", any(proxy_handler))
        .route("/", any(proxy_handler))
        .layer(CorsLayer::permissive())
        .layer(TraceLayer::new_for_http())
        .with_state(state);

    let listener = tokio::net::TcpListener::bind("127.0.0.1:3030").await?;

    info!("Simple proxy server starting on http://127.0.0.1:3030");
    info!("Proxying requests to: {}", PROXY_TARGET);

    axum::serve(listener, app).await?;

    Ok(())
}

#[instrument(skip(state, request))]
async fn proxy_handler(
    axum::extract::State(state): axum::extract::State<AppState>,
    request: Request,
) -> Result<Response, ProxyError> {
    let method = request.method().clone();
    let uri = request.uri().clone();
    let headers = request.headers().clone();
    let body = axum::body::to_bytes(request.into_body(), usize::MAX)
        .await
        .map_err(|e| ProxyError::AxumError(e.to_string()))?;

    // Build target URL
    let path_and_query = uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("/");

    let target_url = format!("{}{}", state.target_url, path_and_query);

    info!("Proxying {} {} to {}", method, uri, target_url);

    // Create reqwest request - convert Method types between different http crate versions
    let reqwest_method = reqwest::Method::from_bytes(method.as_str().as_bytes())
        .map_err(|_| ProxyError::MethodError)?;
    let mut req_builder = state.client.request(reqwest_method, &target_url);

    // Add headers (filter out problematic ones)
    for (name, value) in headers.iter() {
        let name_str = name.as_str();
        // Skip hop-by-hop headers and the host header
        if !should_skip_header(name_str) {
            req_builder = req_builder.header(name.as_str(), value.as_bytes());
        }
    }

    // Add body if present
    if !body.is_empty() {
        req_builder = req_builder.body(body.to_vec());
    }

    // Execute request
    let response = req_builder.send().await?;

    // Build response
    let status = response.status();
    let response_headers = response.headers().clone();
    let response_body = response.bytes().await?;

    let mut builder = Response::builder().status(status.as_u16());

    // Add response headers (filter out problematic ones)
    for (name, value) in response_headers.iter() {
        if !should_skip_response_header(name.as_str()) {
            builder = builder.header(name.as_str(), value.as_bytes());
        }
    }

    let response = builder.body(Body::from(response_body))?;

    Ok(response)
}

fn should_skip_header(name: &str) -> bool {
    matches!(
        name.to_lowercase().as_str(),
        "connection" | "host" | "transfer-encoding" | "upgrade" | "proxy-connection"
    )
}

fn should_skip_response_header(name: &str) -> bool {
    matches!(
        name.to_lowercase().as_str(),
        "connection" | "transfer-encoding" | "upgrade" | "proxy-connection"
    )
}
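The two filter helpers at the bottom are easy to pin down with unit tests; a sketch of a `#[cfg(test)]` module one could append to this file:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn request_filter_drops_hop_by_hop_and_host() {
        // Matching is case-insensitive because the helpers lowercase first.
        assert!(should_skip_header("Connection"));
        assert!(should_skip_header("HOST"));
        assert!(!should_skip_header("content-type"));
    }

    #[test]
    fn response_filter_keeps_host() {
        // "host" is only stripped on the request side.
        assert!(!should_skip_response_header("host"));
        assert!(should_skip_response_header("transfer-encoding"));
    }
}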
packages/scripts/cleanup.sh (new executable file, 54 lines)
@@ -0,0 +1,54 @@
#!/usr/bin/env bash

echo "WARNING: This will remove all build artifacts, temporary directories, and cached files."
echo -n "Are you sure you want to proceed? (y/N): "
read -r response

if [[ ! "$response" =~ ^[Yy]$ ]]; then
    echo "Cleanup cancelled."
    exit 0
fi

# Clean up build artifacts and temporary directories
echo "Cleaning up build artifacts and temporary directories..."

# Remove persisted wrangler data
find . -name ".wrangler" -type d -prune -exec rm -rf {} \;

# Remove node_modules directories
find . -name "node_modules" -type d -prune -exec rm -rf {} \;

# Remove Rust build artifacts
find . -name "target" -type d -prune -exec rm -rf {} \;

# Remove old builds
find . -name "dist" -type d -prune -exec rm -rf {} \;
find . -name "build" -type d -prune -exec rm -rf {} \;

# Remove CDKTF generated files
find . -name ".gen" -type d -prune -exec rm -rf {} \;
find . -name "cdktf.out" -type d -prune -exec rm -rf {} \;
find . -name "*.out" -type f -exec rm -f {} \;

# Remove TypeScript build artifacts
find . -name "*.tsbuildinfo" -type f -exec rm -f {} \;

# Remove Terraform artifacts
find . -name "*.tfstate*" -type f -exec rm -f {} \;
find . -name "*.lock.hcl" -type f -exec rm -f {} \;
find . -name ".terraform" -type d -prune -exec rm -rf {} \;
find . -name ".terraform.lock.hcl" -type f -exec rm -f {} \;

# Remove test and coverage outputs
find . -name "coverage" -type d -prune -exec rm -rf {} \;
find . -name ".nyc_output" -type d -prune -exec rm -rf {} \;

# Remove cache directories
find . -name ".cache" -type d -prune -exec rm -rf {} \;
find . -name ".turbo" -type d -prune -exec rm -rf {} \;
find . -name ".next" -type d -prune -exec rm -rf {} \;

# Remove log files
find . -name "*.log" -type f -exec rm -f {} \;

echo "Cleanup complete!"
packages/scripts/dev.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env sh

(cd deploy/dev/cluster && bun run deploy)
(cd deploy/dev/components && bun run deploy)
(cd deploy/dev/configurations && bun run deploy)
packages/scripts/legacy/deploy.ts (new executable file, 14 lines)
@@ -0,0 +1,14 @@
// Legacy: not used
// Intended to run the entire deployment process and generate artifacts for client applications without requiring developer intervention

// #!/usr/bin/env bun
//
// import {execSync} from "child_process";
//
// function deployCdktf() {
//     execSync("cdktf deploy --auto-approve", {stdio: "inherit"})
//     execSync("./extract-outputs.ts", {stdio: "inherit"})
//     execSync("./update-vars.ts", {stdio: "inherit"})
// }
//
// deployCdktf()
packages/scripts/legacy/extract-outputs.ts (new executable file, 64 lines)
@@ -0,0 +1,64 @@
// Legacy: not used

// #!/usr/bin/env bun
//
// import * as fs from 'fs';
// import * as path from 'path';
//
// interface TerraformOutput {
//     value: any;
//     type: string | string[];
//     sensitive?: boolean;
// }
//
// interface TerraformState {
//     outputs: Record<string, TerraformOutput>;
// }
//
// export function extractOutputsToFile(successfulDeploy: boolean = true) {
//     if (!successfulDeploy) {
//         console.log("[INFO] Skipping outputs extraction, because the deployment was not successful.")
//         return
//     }
//     const stateFilePath = path.join(__dirname, 'terraform.zitadel-dev.tfstate');
//     const outputFilePath = path.join(__dirname, 'terraform-outputs.json');
//
//     try {
//         // Read the terraform state file
//         const stateContent = fs.readFileSync(stateFilePath, 'utf-8');
//         const state: TerraformState = JSON.parse(stateContent);
//
//         // Extract outputs with their values (unmasked)
//         const outputs: Record<string, any> = {};
//
//         for (const [key, output] of Object.entries(state.outputs)) {
//             outputs[key] = {
//                 value: output.value,
//                 type: output.type,
//                 sensitive: output.sensitive || false
//             };
//         }
//
//         // Write outputs to file
//         fs.writeFileSync(outputFilePath, JSON.stringify(outputs, null, 2));
//
//         console.log(`✅ Terraform outputs successfully written to: ${outputFilePath}`);
//         console.log(`📋 Extracted ${Object.keys(outputs).length} outputs:`);
//
//         // Display summary without showing sensitive values in console
//         for (const [key, output] of Object.entries(outputs)) {
//             if (output.sensitive) {
//                 console.log(`  - ${key}: [SENSITIVE - written to file unmasked]`);
//             } else {
//                 console.log(`  - ${key}: ${JSON.stringify(output.value)}`);
//             }
//         }
//
//     } catch (error) {
//         console.error('❌ Error extracting outputs:', error);
//         process.exit(1);
//     }
// }
//
// // Run the extraction
// extractOutputsToFile();
packages/scripts/legacy/update-vars.ts (new executable file, 39 lines)
@@ -0,0 +1,39 @@
// Legacy: not used

// #!/usr/bin/env bun
//
// import {readFileSync, writeFileSync} from "fs";
// import {execSync} from "child_process";
//
// export function configureDevVars() {
//     const terraformOutputs = JSON.parse(readFileSync("terraform-outputs.json", 'utf-8'));
//
//     interface DevVarsConfig {
//         CLIENT_ID: string;
//         CLIENT_SECRET: string;
//         AUTH_SERVER_URL: string;
//         APP_URL: string;
//         DEV_MODE: string;
//         ZITADEL_ORG_ID: string;
//         ZITADEL_PROJECT_ID: string;
//     }
//
//     const destinationConfig: DevVarsConfig = {
//         CLIENT_ID: terraformOutputs.client_id.value,
//         CLIENT_SECRET: terraformOutputs.client_secret.value,
//         AUTH_SERVER_URL: "https://machine.127.0.0.1.sslip.io",
//         APP_URL: "http://localhost:8787",
//         DEV_MODE: "true",
//         ZITADEL_ORG_ID: terraformOutputs.created_org.value.id,
//         ZITADEL_PROJECT_ID: terraformOutputs.created_project.value.id,
//     }
//
//     const repoRoot = execSync('git rev-parse --show-toplevel').toString().trim();
//     const formattedConfig = Object.entries(destinationConfig)
//         .map(([key, value]) => `${key}="${value}"`)
//         .join('\n');
//
//     writeFileSync(`${repoRoot}/.dev.vars`, formattedConfig);
// }
//
// configureDevVars()
packages/scripts/setup.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env sh

set -e

# Run cargo check in the background; `wait` below blocks until it finishes
# (a subshell like `(cargo check &)` would detach the job from this shell)
cargo check &

bun i

for dir in deploy/dev/*/; do
    if [ -f "${dir}/cdktf.json" ]; then
        echo "Running cdktf get in ${dir}"
        cd "${dir}" && cdktf get && cd - > /dev/null
    fi
done

wait
packages/scripts/teardown_all.sh (new file, 14 lines)
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# bash, not sh: the [[ ... =~ ... ]] test below is a bash extension

echo "WARNING: This will destroy all local deployments."
echo -n "Are you sure you want to proceed? (y/N): "
read -r response

if [[ ! "$response" =~ ^[Yy]$ ]]; then
    echo "Teardown cancelled."
    exit 0
fi

(cd deploy/dev/cluster && bun run destroy)
(cd deploy/dev/components && bun run destroy)
(cd deploy/dev/configurations && bun run destroy)
packages/scripts/trust_cluster_cert.sh (new file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env sh

CERT_PATH="/tmp/kind-cluster.crt"

echo "Getting cluster certificate from Kubernetes secret..."
kubectl get secret zitadel-tls -n default -o jsonpath='{.data.tls\.crt}' | base64 -d > "${CERT_PATH}"

if [ ! -f "${CERT_PATH}" ]; then
    echo "Error: Certificate file ${CERT_PATH} not found"
    exit 1
fi

echo "Adding certificate to macOS keychain..."
# macOS specific
sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain "${CERT_PATH}"
echo "Certificate successfully added to keychain"
packages/scripts/trust_zitadel_cert.sh (new file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env sh

CERT_PATH="/tmp/zitadel.crt"

echo "Getting ZITADEL certificate from Kubernetes secret..."
kubectl get secret zitadel-tls -n default -o jsonpath='{.data.tls\.crt}' | base64 -d > "${CERT_PATH}"

if [ ! -f "${CERT_PATH}" ]; then
    echo "Error: Certificate file ${CERT_PATH} not found"
    exit 1
fi

echo "Adding ZITADEL certificate to macOS keychain..."
# macOS specific
sudo security add-trusted-cert -d -r trustRoot -k /Library/Keychains/System.keychain "${CERT_PATH}"
echo "ZITADEL certificate successfully added to keychain"
packages/scripts/untrust_dev_certs.sh (new file, 12 lines)
@@ -0,0 +1,12 @@
#!/usr/bin/env sh

untrust_cert() {
    cert_path=$1
    echo "Removing trust for development certificate"
    sudo security remove-trusted-cert -d "$cert_path"
}

# Use the same paths the trust_*_cert.sh scripts write to
untrust_cert /tmp/kind-cluster.crt
untrust_cert /tmp/zitadel.crt

echo "Development certificates successfully removed from system trust store"