Add dynamically generated robots.txt from Markdown

This commit is contained in:
Raven Scott 2024-09-26 04:32:47 -04:00
parent da0de2464e
commit 8e7aef938e
2 changed files with 38 additions and 0 deletions

27
app.js
View File

@ -313,6 +313,33 @@ app.get('/rss', (req, res) => {
res.send(rssFeed);
});
// Function to parse markdown and format as robots.txt.
// Reads me/robots.md, finds the "### <Env>" section whose title matches
// `env` (case-insensitive), and returns that section's Markdown list items
// as plain robots.txt directives.
// Falls back to a deny-all policy when the file or section is missing.
function generateRobotsTxt(env) {
    const robotsConfigFile = path.join(__dirname, 'me', 'robots.md');
    // Default fallback if the config is unreadable or no environment matches.
    const fallback = 'User-agent: *\nDisallow: /';

    let markdownContent;
    try {
        markdownContent = fs.readFileSync(robotsConfigFile, 'utf-8');
    } catch (err) {
        // A missing/unreadable config file must not crash the request.
        return fallback;
    }

    // Split into "### <Title>" chunks; each chunk begins with its title line.
    const sections = markdownContent
        .split('###')
        .map((section) => section.trim())
        .filter(Boolean);

    // Match the section title exactly, ignoring case ('production' -> 'Production').
    const wanted = env.toLowerCase();
    const configSection = sections.find(
        (section) => section.split('\n', 1)[0].trim().toLowerCase() === wanted
    );
    if (!configSection) {
        return fallback;
    }

    return configSection
        .split('\n')
        .slice(1) // Remove the section title
        .map((line) => line.replace(/^\s*-\s*/, '')) // Strip only the leading Markdown list dash
        .filter((line) => line.trim() !== '') // Skip blank lines between entries
        .join('\n');
}
// Robots.txt Route — serve the environment-specific rules as plain text.
app.get('/robots.txt', (req, res) => {
    // Fall back to 'development' when NODE_ENV is not set.
    const environment = process.env.NODE_ENV || 'development';
    res.type('text/plain');
    res.send(generateRobotsTxt(environment));
});
// Create a URL object from the environment variable
const blog_URL = new URL(process.env.BLOG_URL);

11
me/robots.md Normal file
View File

@ -0,0 +1,11 @@
### Production
- User-agent: *
- Allow: /
- Sitemap: https://raven-scott.fyi/sitemap.xml
### Development
- User-agent: *
- Disallow: /