forked from snxraven/ravenscott-blog
Add dynamically generated robots.txt from Markdown
This commit is contained in:
parent
da0de2464e
commit
8e7aef938e
27
app.js
27
app.js
@ -313,6 +313,33 @@ app.get('/rss', (req, res) => {
|
||||
res.send(rssFeed);
|
||||
});
|
||||
|
||||
|
||||
// Build robots.txt content for the given environment by parsing me/robots.md.
//
// The markdown file is split into "### <Environment>" sections whose list
// items are the literal robots.txt directives, e.g.:
//   ### Production
//   - User-agent: *
//   - Allow: /
//
// @param {string} env - Environment name (e.g. 'production', 'development');
//   matched case-insensitively on the first letter against the section title.
// @returns {string} The robots.txt body for that environment, or a
//   deny-all default when the file or a matching section is missing.
function generateRobotsTxt(env) {
  const robotsConfigFile = path.join(__dirname, 'me', 'robots.md');

  let markdownContent;
  try {
    markdownContent = fs.readFileSync(robotsConfigFile, 'utf-8');
  } catch (err) {
    // Missing/unreadable config must not crash the request handler;
    // serve the safe deny-all default instead.
    return 'User-agent: *\nDisallow: /';
  }

  const sections = markdownContent
    .split('###')
    .map((section) => section.trim())
    .filter(Boolean);

  // Section titles are capitalized ('Production'), env names are not.
  const sectionName = env.charAt(0).toUpperCase() + env.slice(1);
  const configSection = sections.find((section) => section.startsWith(sectionName));

  if (configSection) {
    return configSection
      .split('\n')
      .slice(1) // drop the section title line
      .map((line) => line.replace(/^- /, '')) // strip only the leading markdown list dash
      .join('\n')
      .trim(); // drop the blank line(s) between title and first directive
  }

  // Default fallback if no matching environment is found
  return 'User-agent: *\nDisallow: /';
}
|
||||
|
||||
// Serve /robots.txt, generated on each request from me/robots.md so the
// directives can differ per deployment environment.
app.get('/robots.txt', (req, res) => {
  // NODE_ENV drives which markdown section is used; unset means development.
  const env = process.env.NODE_ENV || 'development';

  res.type('text/plain');
  res.send(generateRobotsTxt(env));
});
|
||||
|
||||
// Create a URL object from the environment variable.
// Fail fast with an actionable message when BLOG_URL is unset; otherwise
// `new URL(undefined)` throws an opaque "TypeError: Invalid URL: undefined"
// at startup with no hint about which variable is missing.
if (!process.env.BLOG_URL) {
  throw new Error('BLOG_URL environment variable must be set (e.g. https://example.com)');
}
const blog_URL = new URL(process.env.BLOG_URL);
|
||||
|
||||
|
11
me/robots.md
Normal file
11
me/robots.md
Normal file
@ -0,0 +1,11 @@
|
||||
### Production
|
||||
|
||||
- User-agent: *
|
||||
- Allow: /
|
||||
- Sitemap: https://raven-scott.fyi/sitemap.xml
|
||||
|
||||
### Development
|
||||
|
||||
- User-agent: *
|
||||
- Disallow: /
|
||||
|
Loading…
Reference in New Issue
Block a user