Add dynamic robots.txt generation and update sitemap handling

- Remove the static robots.txt file.
- Implement a dynamic robots.txt generator script (`generate_robots_txt.js`) to allow hostname flexibility.
- Update the sitemap generation script to also take the hostname dynamically from its arguments.
- Modify the Vite config to automatically generate both the sitemap and robots.txt during build.
- Add `public/robots.txt` to `.gitignore`, since it is now a generated artifact.
This commit is contained in:
geoffsee
2025-05-28 22:04:28 -04:00
parent 82ba490dae
commit db5137e772
5 changed files with 56 additions and 10 deletions

View File

@@ -1,19 +1,30 @@
#!/usr/bin/env bun
import fs from "fs";
import {parseArgs} from "util";
const {positionals} = parseArgs({
args: Bun.argv,
options: {},
strict: true,
allowPositionals: true,
});
const currentDate = new Date().toISOString().split("T")[0];
const host = positionals[2];
const sitemapTemplate = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9 ">
<url>
<loc>https://geoff.seemueller.io/</loc>
<loc>https://${host}/</loc>
<lastmod>${currentDate}</lastmod>
<priority>1.0</priority>
</url>
<url>
<loc>https://geoff.seemueller.io/connect</loc>
<loc>https://open-gsio.seemueller.workers.dev/connect</loc>
<lastmod>${currentDate}</lastmod>
<priority>0.7</priority>
</url>