README
express-seo-robots
Simple robots.txt file generation for express
Install
npm install --save express-seo-robots
Usage
Add middleware to your express application before all other routes.
var express = require('express');
var app = express();
var robotsTxt = require('express-seo-robots');
// add middleware with robots.txt config
app.use(robotsTxt({ userAgent: '*', allow: '/', sitemap: 'https://yourdomain.com/sitemap.xml' }));
// standard express route
app.get('/', function(req, res) {
res.send('Hello world!');
});
Based on the setup above, a request to /robots.txt
will now return:
User-agent: *
Allow: /
Sitemap: https://yourdomain.com/sitemap.xml
You can provide multiple entries by passing an array of values, as in the following:
// add robots.txt middleware with custom config
app.use(robotsTxt([
{ userAgent: 'Googlebot-news', allow: '/news', crawlDelay: '5' },
{ userAgent: 'Googlebot', disallow: '/private' },
{ userAgent: 'Googlebot', disallow: '/*.xls