A Node module and command-line tool for crawling a website for dead links, permanent and/or fatal redirects, resource load issues, and script errors. It is based on the CasperJS navigation scripting and testing utility.
var spider = require( "spider.js" );
spider( options );
spiderjs --url=http://example.com [ option1 ] [ option2 ]
var spider = require( "spider.js" );
spider( {
    url: "http://example.com",
    ignore: "error.html",
    redirectError: false
} );
spiderjs --url=http://example.com --ignore=error.html --redirectError=false
var gulp = require( "gulp" );
var spider = require( "spider.js" );

gulp.task( "spider", function() {
    spider( {
        url: "http://localhost:8000",
        ignore: "error.html"
    } );
} );
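The task can then be run like any other gulp task, e.g. gulp spider.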
Type: String
Default value: 'http://localhost'
A valid URL for the website to crawl. This URL may be local or remote.
Type: String
Default value: ''
A string that will be used to create a regular expression for excluding URLs from being spidered.
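Because the value is turned into a regular expression rather than matched literally, a single ignore string can exclude several paths. A minimal sketch, assuming the string is passed straight to a RegExp constructor (the pattern below is only an example):

var spider = require( "spider.js" );

// Skip any URL containing "error.html" or anything under /drafts/.
spider( {
    url: "http://example.com",
    ignore: "error\\.html|/drafts/"
} );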
Type: String
Default value: false
A file to output the test log and results to.
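A sketch of writing results to disk instead of the console; logFile is a hypothetical option name used only for illustration, so substitute the actual option name:

var spider = require( "spider.js" );

// "logFile" is a hypothetical option name used only for illustration.
spider( {
    url: "http://example.com",
    logFile: "spider-results.log"
} );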
Type: Boolean
Default value: true
Whether or not to check for 4XX errors.
Type: Boolean
Default value: true
Whether or not to check for 3XX errors.
Type: Boolean
Default value: true
Whether or not to check for resource errors.
Type: Boolean
Default value: true
Whether or not to check for script errors.
Type: Number
Default value: 10
The maximum number of pages to report per link. This helps prevent excessive output when a link exists on every page of a site, such as in a header or footer.
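For example, a small site might raise this limit so every occurrence of a broken link is listed; maxPages is a hypothetical option name used only for illustration, so substitute the actual option name:

var spider = require( "spider.js" );

// "maxPages" is a hypothetical name for the pages-per-link limit,
// used only for illustration.
spider( {
    url: "http://example.com",
    maxPages: 25
} );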