Files
open-gsio/scripts/generate_robots_txt.js
geoffsee db5137e772 Add dynamic robots.txt generation and update sitemap handling
- Remove static robots.txt file.
- Implement dynamic robots.txt generator script (`generate_robots_txt.js`) to allow hostname flexibility.
- Update sitemap generation script to also use dynamic hostname from arguments.
- Modify the Vite config to automate generation of both the sitemap and robots.txt during the build (see the sketch below).
- Add `public/robots.txt` to `.gitignore`.
2025-05-28 22:04:28 -04:00
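
As a hedged sketch of how the Vite config might wire both generators into the build: the plugin name, script paths, and the HOSTNAME environment variable below are assumptions for illustration, not the repo's actual configuration.

// Hypothetical vite.config.js excerpt (names and env source are assumptions).
import { defineConfig } from "vite";
import { execSync } from "node:child_process";

const hostname = process.env.HOSTNAME ?? "example.com";

export default defineConfig({
  plugins: [
    {
      name: "generate-seo-files",
      // Run the generator scripts before bundling so public/ already
      // contains robots.txt and sitemap.xml when static assets are copied.
      buildStart() {
        execSync(`bun ./scripts/generate_robots_txt.js ${hostname}`, { stdio: "inherit" });
        execSync(`bun ./scripts/generate_sitemap.js ${hostname}`, { stdio: "inherit" });
      },
    },
  ],
});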


#!/usr/bin/env bun
import fs from "fs";
import { parseArgs } from "util";

// Collect positional arguments from Bun.argv: [bun binary, script path, hostname, ...].
const { positionals } = parseArgs({
  args: Bun.argv,
  options: {},
  strict: true,
  allowPositionals: true,
});

const currentDate = new Date().toISOString().split("T")[0];

// The hostname is the first user-supplied positional (index 2, after the bun
// binary and the script path). Guard against a missing argument so we never
// write "https://undefined/sitemap.xml".
const host = positionals[2];
if (!host) {
  console.error("Usage: generate_robots_txt.js <hostname>");
  process.exit(1);
}

// Allow the public pages, block API and asset paths, and point crawlers at
// the sitemap on the supplied hostname.
const robotsTxtTemplate = `
User-agent: *
Allow: /
Allow: /connect
Disallow: /api
Disallow: /assets
Sitemap: https://${host}/sitemap.xml
`;

const robotsTxtPath = "./public/robots.txt";
fs.writeFile(robotsTxtPath, robotsTxtTemplate, (err) => {
  if (err) {
    console.error("Error writing robots.txt:", err);
    process.exit(1);
  }
  console.log("robots.txt created successfully:", currentDate);
});
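
Assuming the script is invoked from the project root (the output path ./public/robots.txt is resolved relative to the working directory), a manual run looks like `bun scripts/generate_robots_txt.js example.com`; the build integration sketched above passes the hostname the same way.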