Revert "Remove dynamic robots.txt and move to static txt"
This commit is contained in:
snxraven 2024-09-26 04:41:43 -04:00
parent 388d489181
commit f64c5df9f3
3 changed files with 32 additions and 3 deletions

27
app.js
View File

@ -313,6 +313,33 @@ app.get('/rss', (req, res) => {
res.send(rssFeed);
});
// Function to parse markdown and format as robots.txt.
// Reads me/robots.md, locates the "### <Env>" section whose title is the
// capitalized environment name, strips the Markdown list dashes, and returns
// the remaining lines as plain robots.txt directives.
//
// @param {string} env - environment name, e.g. 'production' or 'development'
// @returns {string} robots.txt body; a disallow-all default is returned when
//   the config file is missing/unreadable or no section matches the env.
function generateRobotsTxt(env) {
  const robotsConfigFile = path.join(__dirname, 'me', 'robots.md');

  let markdownContent;
  try {
    markdownContent = fs.readFileSync(robotsConfigFile, 'utf-8');
  } catch (err) {
    // A missing/unreadable config must not 500 the route — serve a safe default.
    return 'User-agent: *\nDisallow: /';
  }

  const sections = markdownContent
    .split('###')
    .map((section) => section.trim())
    .filter(Boolean);

  // Section titles are capitalized in robots.md, e.g. '### Production'.
  const title = env.charAt(0).toUpperCase() + env.slice(1);
  const configSection = sections.find((section) => section.startsWith(title));

  if (configSection) {
    return configSection
      .split('\n')
      .slice(1) // drop the section title line
      // Anchor the replace so only a *leading* Markdown list dash is removed;
      // a bare replace('- ', '') would corrupt directive values containing '- '.
      .map((line) => line.replace(/^- /, ''))
      .join('\n');
  }

  // Default fallback if no matching environment is found
  return 'User-agent: *\nDisallow: /';
}
// Robots.txt Route
// Serves an environment-specific robots.txt built from me/robots.md.
app.get('/robots.txt', (req, res) => {
  // Fall back to 'development' when NODE_ENV is unset.
  const environment = process.env.NODE_ENV || 'development';
  res.type('text/plain');
  res.send(generateRobotsTxt(environment));
});
// Create a URL object from the environment variable
// NOTE(review): new URL() throws a TypeError at startup if BLOG_URL is unset
// or not a valid absolute URL — verify the deployment always sets it.
const blog_URL = new URL(process.env.BLOG_URL);

5
me/robots.md Normal file
View File

@ -0,0 +1,5 @@
### Production
- User-agent: *
- Allow: /
- Sitemap: https://raven-scott.fyi/sitemap.xml

View File

@ -1,3 +0,0 @@
User-agent: *
Allow: /
Sitemap: https://raven-scott.fyi/sitemap.xml