Index: src/java/org/apache/nutch/fetcher/Fetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/Fetcher.java	(revision 1027060)
+++ src/java/org/apache/nutch/fetcher/Fetcher.java	(working copy)
@@ -541,6 +541,7 @@
     private boolean redirecting;
     private int redirectCount;
     private boolean ignoreExternalLinks;
+    private boolean checkRobots;
 
     public FetcherThread(Configuration conf) {
       this.setDaemon(true);                       // don't hang JVM on exit
@@ -556,6 +557,7 @@
       this.maxRedirect = conf.getInt("http.redirect.max", 3);
       this.ignoreExternalLinks = 
         conf.getBoolean("db.ignore.external.links", false);
+      this.checkRobots = conf.getBoolean(Protocol.CHECK_ROBOTS, true);
     }
 
     public void run() {
@@ -601,29 +603,33 @@
              }
              redirecting = false;
              Protocol protocol = this.protocolFactory.getProtocol(fit.url.toString());
-              RobotRules rules = protocol.getRobotRules(fit.url, fit.datum);
-              if (!rules.isAllowed(fit.u)) {
-                // unblock
-                fetchQueues.finishFetchItem(fit, true);
-                if (LOG.isDebugEnabled()) {
-                  LOG.debug("Denied by robots.txt: " + fit.url);
-                }
-                output(fit.url, fit.datum, null, ProtocolStatus.STATUS_ROBOTS_DENIED, CrawlDatum.STATUS_FETCH_GONE);
-                reporter.incrCounter("FetcherStatus", "robots_denied", 1);
-                continue;
-              }
-              if (rules.getCrawlDelay() > 0) {
-                if (rules.getCrawlDelay() > maxCrawlDelay) {
+              if (checkRobots) {
+                RobotRules rules = protocol.getRobotRules(fit.url, fit.datum);
+                if (!rules.isAllowed(fit.u)) {
                  // unblock
                  fetchQueues.finishFetchItem(fit, true);
-                  LOG.debug("Crawl-Delay for " + fit.url + " too long (" + rules.getCrawlDelay() + "), skipping");
+                  if (LOG.isDebugEnabled()) {
+                    LOG.debug("Denied by robots.txt: " + fit.url);
+                  }
                  output(fit.url, fit.datum, null, ProtocolStatus.STATUS_ROBOTS_DENIED, CrawlDatum.STATUS_FETCH_GONE);
-                  reporter.incrCounter("FetcherStatus", "robots_denied_maxcrawldelay", 1);
+                  reporter.incrCounter("FetcherStatus", "robots_denied", 1);
                  continue;
-                } else {
-                  FetchItemQueue fiq = fetchQueues.getFetchItemQueue(fit.queueID);
-                  fiq.crawlDelay = rules.getCrawlDelay();
                }
+                if (rules.getCrawlDelay() > 0) {
+                  if (rules.getCrawlDelay() > maxCrawlDelay) {
+                    // unblock
+                    fetchQueues.finishFetchItem(fit, true);
+                    if (LOG.isDebugEnabled()) {
+                      LOG.debug("Crawl-Delay for " + fit.url + " too long (" + rules.getCrawlDelay() + "), skipping");
+                    }
+                    output(fit.url, fit.datum, null, ProtocolStatus.STATUS_ROBOTS_DENIED, CrawlDatum.STATUS_FETCH_GONE);
+                    reporter.incrCounter("FetcherStatus", "robots_denied_maxcrawldelay", 1);
+                    continue;
+                  } else {
+                    FetchItemQueue fiq = fetchQueues.getFetchItemQueue(fit.queueID);
+                    fiq.crawlDelay = rules.getCrawlDelay();
+                  }
+                }
               }
               ProtocolOutput output = protocol.getProtocolOutput(fit.url, fit.datum);
               ProtocolStatus status = output.getStatus();
