fix: generate robots.txt dynamically from site base URL
Replace static robots.txt with an Astro endpoint that uses the configured site URL, so sitemap references are correct on both fork deployments and production.
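The endpoint reads the base URL from Astro's site setting, so the fix assumes that value is configured per deployment. A minimal sketch of that configuration (the project's real astro.config.mjs is not shown in this diff; the URL is the production one taken from the old robots.txt):

    // astro.config.mjs (sketch, not the project's actual config)
    import { defineConfig } from 'astro/config';

    export default defineConfig({
      // Passed to API routes as `site`; a fork deployment would set its own URL here.
      site: 'https://docs.bmad-method.org',
    });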
parent d85e7779bd
commit 7f1f7b614b
@@ -1,5 +1,10 @@
-# BMAD Method Documentation
-# https://docs.bmad-method.org/
+import type { APIRoute } from 'astro';
+
+export const GET: APIRoute = ({ site }) => {
+  const siteUrl = site?.href.replace(/\/$/, '') ?? '';
+
+  const body = `# BMAD Method Documentation
+# ${siteUrl}/
 #
 # This file controls web crawler access to the documentation site.
 
@@ -34,4 +39,10 @@ User-agent: cohere-ai
 Allow: /
 
 # Sitemap
-Sitemap: https://docs.bmad-method.org/sitemap-index.xml
+Sitemap: ${siteUrl}/sitemap-index.xml
+`;
+
+  return new Response(body, {
+    headers: { 'Content-Type': 'text/plain; charset=utf-8' },
+  });
+};
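The only non-obvious line is the siteUrl derivation, which strips a trailing slash and falls back to an empty string when no site is configured. A standalone sketch of that expression (plain TypeScript, no Astro required; the values in the comments are the expected results):

    // Mirrors the expression used in the endpoint above.
    const deriveSiteUrl = (site?: URL) => site?.href.replace(/\/$/, '') ?? '';

    console.log(deriveSiteUrl(new URL('https://docs.bmad-method.org/'))); // "https://docs.bmad-method.org"
    console.log(deriveSiteUrl(undefined)); // "" (the Sitemap line then degrades to "Sitemap: /sitemap-index.xml")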