Remove dynamic robots.txt and move to static txt

Raven Scott 2024-09-26 04:40:07 -04:00
parent e9101d6df5
commit 388d489181
3 changed files with 3 additions and 32 deletions
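For context, a minimal sketch of how the static file is presumably served after this change. It assumes app.js already mounts Express's static middleware on the public/ directory; that wiring is not part of this diff:

const express = require('express');
const path = require('path');

const app = express();

// Assumption: public/ is served statically, so public/robots.txt is
// reachable at /robots.txt without a dedicated route.
app.use(express.static(path.join(__dirname, 'public')));

app.listen(process.env.PORT || 3000);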

app.js (27 deletions)

@@ -313,33 +313,6 @@ app.get('/rss', (req, res) => {
     res.send(rssFeed);
 });
-// Function to parse markdown and format as robots.txt
-function generateRobotsTxt(env) {
-    const robotsConfigFile = path.join(__dirname, 'me', 'robots.md');
-    const markdownContent = fs.readFileSync(robotsConfigFile, 'utf-8');
-    const sections = markdownContent.split('###').map(section => section.trim()).filter(Boolean);
-    let configSection = sections.find(section => section.startsWith(env.charAt(0).toUpperCase() + env.slice(1)));
-    if (configSection) {
-        configSection = configSection.split('\n').slice(1); // Remove the section title
-        return configSection.map(line => line.replace('- ', '')).join('\n'); // Remove Markdown list dashes
-    }
-    // Default fallback if no matching environment is found
-    return 'User-agent: *\nDisallow: /';
-}
-
-// Robots.txt Route
-app.get('/robots.txt', (req, res) => {
-    const env = process.env.NODE_ENV || 'development'; // Default to 'development' if not set
-    const robotsContent = generateRobotsTxt(env);
-    res.type('text/plain');
-    res.send(robotsContent);
-});
 // Create a URL object from the environment variable
 const blog_URL = new URL(process.env.BLOG_URL);
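Note the behavioral change: the deleted route varied its output by NODE_ENV and fell back to a blanket Disallow when no matching section existed in me/robots.md; the static file added below serves the same Allow-all policy in every environment.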

me/robots.md (deleted file, 5 deletions)

@@ -1,5 +0,0 @@
-### Production
-- User-agent: *
-- Allow: /
-- Sitemap: https://raven-scott.fyi/sitemap.xml
-

public/robots.txt (new file, 3 additions)

@@ -0,0 +1,3 @@
+User-agent: *
+Allow: /
+Sitemap: https://raven-scott.fyi/sitemap.xml
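A quick hypothetical check that the endpoint still responds as expected, assuming the app listens locally on port 3000 (Node 18+ for the built-in fetch):

// Fetch /robots.txt and print the body; expect the three
// lines from public/robots.txt above.
fetch('http://localhost:3000/robots.txt')
  .then((res) => res.text())
  .then((body) => console.log(body))
  .catch((err) => console.error('robots.txt check failed:', err));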