robots-txt-builder.js

import { toArray } from '../utils/array.js';
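// toArray (from ../utils/array.js, not shown here) is assumed to wrap a single
// string in a one-element array and return arrays unchanged.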
export class RobotsTxtBuilder {
  /**
   * Normalize robots.txt policies so that `allow` and `disallow` are always arrays.
   * @param policies Policy entries to normalize
   * @returns Policies with `allow` and `disallow` coerced to arrays
   */
  normalizePolicy(policies) {
    return policies.map((x) => ({
      ...x,
      allow: toArray(x.allow ?? []),
      disallow: toArray(x.disallow ?? []),
    }));
  }
  /**
   * Render a rule set as repeated `key: value` lines.
   * @param key Directive name, e.g. `Allow` or `Disallow`
   * @param rules Values to emit, one per line
   * @returns The rendered directive lines, each ending in a newline
   */
  addPolicies(key, rules) {
    return rules.reduce((prev, curr) => `${prev}${key}: ${curr}\n`, '');
  }
  /**
   * Generates robots.txt content from the given config.
   * @param config Config containing `siteUrl` and `robotsTxtOptions`
   * @returns The robots.txt content as a string
   */
  generateRobotsTxt(config) {
    const { additionalSitemaps, policies } = config.robotsTxtOptions;
    const normalizedPolicies = this.normalizePolicy(policies);
    let content = '';

    normalizedPolicies.forEach((x) => {
      // Each policy becomes its own User-agent block.
      content += `# ${x.userAgent}\nUser-agent: ${x.userAgent}\n`;

      if (x.allow.length > 0) {
        content += this.addPolicies('Allow', x.allow);
      }

      if (x.disallow.length > 0) {
        content += this.addPolicies('Disallow', x.disallow);
      }

      if (x.crawlDelay) {
        content += `Crawl-delay: ${x.crawlDelay}\n`;
      }

      content += '\n';
    });

    // Append host
    content += `# Host\nHost: ${config.siteUrl}\n`;

    if (additionalSitemaps && additionalSitemaps.length > 0) {
      content += `\n# Sitemaps\n`;
      additionalSitemaps.forEach((x) => {
        content += `Sitemap: ${x}\n`;
      });
    }

    return content;
  }
}
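
// Usage sketch (not part of the original module): the config shape below is an
// assumption inferred from the fields read above (`siteUrl`,
// `robotsTxtOptions.policies`, `robotsTxtOptions.additionalSitemaps`); the real
// config may carry more options.
//
// const builder = new RobotsTxtBuilder();
// const content = builder.generateRobotsTxt({
//   siteUrl: 'https://example.com',
//   robotsTxtOptions: {
//     policies: [
//       { userAgent: '*', allow: '/', disallow: ['/admin', '/api'], crawlDelay: 10 },
//     ],
//     additionalSitemaps: ['https://example.com/server-sitemap.xml'],
//   },
// });
// // content now holds a User-agent block for '*', a Host line, and a Sitemap line.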