-
Notifications
You must be signed in to change notification settings - Fork 2
/
generateRobotsTxt.js
43 lines (34 loc) · 1.22 KB
/
generateRobotsTxt.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
const fileSystem = require('fs');
const path = require('path');

// Path to the robots.txt file that receives the merged rules.
const robotsFilePath = path.join(__dirname, 'public', 'robots.txt');

// Merged agents file; assumed shape { allowed: string[], disallowed: string[] }
// based on the usage below — verify against agents.json.
const agents = require('./agents.json');

// Build one "User-agent" directive block per agent: allowed agents get
// "Allow: /", disallowed agents get "Disallow: /".
const rules = [
  ...agents.allowed.map(userAgent => `User-agent: ${userAgent}\nAllow: /`),
  ...agents.disallowed.map(userAgent => `User-agent: ${userAgent}\nDisallow: /`),
];

// Combine the blocks into a single string, one blank line between them.
const customRules = rules.join('\n\n');

// Read the existing robots.txt, then prepend the generated rules.
fileSystem.readFile(robotsFilePath, 'utf8', (err, existingContent) => {
  if (err && err.code !== 'ENOENT') {
    // A missing file is fine (first run); anything else is a real failure.
    console.error('Error reading robots.txt:', err);
    process.exitCode = 1; // surface the failure to CI/build scripts
    return;
  }
  const currentContent = err ? '' : existingContent;

  // Skip the write when the custom rules are already present, so re-running
  // the script does not prepend duplicate blocks on every build.
  if (currentContent.includes(customRules.trim())) {
    console.log('robots.txt already contains the custom rules; nothing to do');
    return;
  }

  // Custom rules first, then whatever was already in the file.
  const mergedContent = `${customRules.trim()}\n\n${currentContent.trim()}\n`;

  fileSystem.writeFile(robotsFilePath, mergedContent, 'utf8', err => {
    if (err) {
      console.error('Error writing to robots.txt:', err);
      process.exitCode = 1; // surface the failure to CI/build scripts
    } else {
      console.log('robots.txt file updated with merged content');
    }
  });
});