import { mkdir, writeFile } from 'node:fs/promises';
import { resolve } from 'node:path';
// Marketing pages included in sitemap.xml: [path, changefreq, priority].
const publicRoutes = [
  ['/', 'weekly', '1.0'],
  ['/product', 'weekly', '0.8'],
  ['/pricing', 'monthly', '0.7'],
  ['/blogs', 'weekly', '0.6'],
  ['/guides', 'weekly', '0.6'],
].map(([path, changefreq, priority]) => ({ path, changefreq, priority }));
// App/auth routes that crawlers must not index (emitted as Disallow lines).
const disallowedRoutes = [
  'app/',
  'login',
  'register',
  'forgot-password',
  'reset-password',
  'verify-email',
].map((route) => `/${route}`);
// Canonical site origin: prefer the Vite-exposed env var, then SITE_URL,
// then the local dev server. A single trailing slash is stripped so joins
// with root-relative paths don't double up.
const rawSiteUrl = process.env.VITE_PUBLIC_SITE_URL ?? process.env.SITE_URL ?? 'http://localhost:5173';
const siteUrl = rawSiteUrl.endsWith('/') ? rawSiteUrl.slice(0, -1) : rawSiteUrl;
// Output directory for the generated SEO files.
const distDir = resolve(process.cwd(), 'dist');
/**
 * Build a fully-qualified URL for a root-relative site path.
 * @param {string} path - Root-relative path, e.g. '/sitemap.xml'.
 * @param {string} [base=siteUrl] - Origin to resolve against; defaults to the
 *   configured site URL so existing callers are unchanged.
 * @returns {string} Absolute URL string.
 */
function absoluteUrl(path, base = siteUrl) {
  // The explicit trailing slash on the base keeps URL resolution anchored at the origin.
  return new URL(path, `${base}/`).toString();
}
// robots.txt: allow everything by default, then explicitly disallow the
// private app/auth routes, and point crawlers at the sitemap.
const robotsLines = ['User-agent: *', 'Allow: /'];
for (const route of disallowedRoutes) {
  robotsLines.push(`Disallow: ${route}`);
}
robotsLines.push('', `Sitemap: ${absoluteUrl('/sitemap.xml')}`, '');
const robots = robotsLines.join('\n');
// BUG FIX: the XML markup was missing entirely — each entry emitted only
// indentation and bare values (no <url>/<loc>/<changefreq>/<priority> tags)
// and the document wrapper emitted empty strings instead of the XML
// declaration and <urlset> element, so sitemap.xml was not valid XML.
// Restore proper sitemap-protocol markup (https://www.sitemaps.org/protocol.html).
// NOTE: values are not entity-escaped; safe for these static routes, which
// contain no '&', '<' or '>' — revisit if dynamic URLs are ever added.
const sitemapEntries = publicRoutes
  .map((route) => [
    '  <url>',
    `    <loc>${absoluteUrl(route.path)}</loc>`,
    `    <changefreq>${route.changefreq}</changefreq>`,
    `    <priority>${route.priority}</priority>`,
    '  </url>',
  ].join('\n'))
  .join('\n');
const sitemap = [
  '<?xml version="1.0" encoding="UTF-8"?>',
  '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">',
  sitemapEntries,
  '</urlset>',
  '',
].join('\n');
// Ensure the output directory exists (no-op when already present).
await mkdir(distDir, { recursive: true });
// The two writes are independent — run them in parallel instead of serially.
await Promise.all([
  writeFile(resolve(distDir, 'robots.txt'), robots),
  writeFile(resolve(distDir, 'sitemap.xml'), sitemap),
]);
console.log(`Wrote public SEO files for ${siteUrl}`);