Index: src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java
===================================================================
--- src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(revision 410654)
+++ src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(working copy)
@@ -178,12 +178,15 @@
         logger.fine("Exception checking robot rules for " + url + ": " + e);
       }
       
-      String host = blockAddr(u);
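+      // Look up the Crawl-Delay from the host's robots.txt: milliseconds, or -1 if unspecified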
+      long crawlDelay = robots.getCrawlDelay(this, u);
+      
+      String host = blockAddr(u, crawlDelay);
       Response response;
       try {
         response = getResponse(u, datum, false); // make a request
       } finally {
-        unblockAddr(host);
+        unblockAddr(host, crawlDelay);
       }
       
       int code = response.getCode();
@@ -293,7 +296,7 @@
     return useHttp11;
   }
   
-  private String blockAddr(URL url) throws ProtocolException {
+  private String blockAddr(URL url, long crawlDelay) throws ProtocolException {
     
     String host;
     if (byIP) {
@@ -336,12 +339,15 @@
       
       if (delays == maxDelays)
         throw new HttpException("Exceeded http.max.delays: retry later.");
+     
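+      // Prefer the robots.txt Crawl-Delay over the configured default server delay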
+      long delay = crawlDelay > 0 ? crawlDelay : serverDelay;
       
       long done = time.longValue();
       long now = System.currentTimeMillis();
       long sleep = 0;
       if (done == 0) {                            // address is still in use
-        sleep = serverDelay;                      // wait at least delay
+        sleep = delay;                            // wait at least delay
         
       } else if (now < done) {                    // address is on hold
         sleep = done - now;                       // wait until its free
@@ -354,14 +360,18 @@
     }
   }
   
-  private void unblockAddr(String host) {
+  private void unblockAddr(String host, long crawlDelay) {
     synchronized (BLOCKED_ADDR_TO_TIME) {
       int addrCount = ((Integer)THREADS_PER_HOST_COUNT.get(host)).intValue();
       if (addrCount == 1) {
         THREADS_PER_HOST_COUNT.remove(host);
         BLOCKED_ADDR_QUEUE.addFirst(host);
+        
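+        // Keep the host blocked for the Crawl-Delay if robots.txt set one, else for the default server delay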
+        long delay = crawlDelay > 0 ? crawlDelay : serverDelay;
+        
         BLOCKED_ADDR_TO_TIME.put
-                (host, new Long(System.currentTimeMillis() + serverDelay));
+                (host, new Long(System.currentTimeMillis() + delay));
       } else {
         THREADS_PER_HOST_COUNT.put(host, new Integer(addrCount - 1));
       }
Index: src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java
===================================================================
--- src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java	(revision 410654)
+++ src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java	(working copy)
@@ -73,6 +73,7 @@
     ArrayList tmpEntries;
     RobotsEntry[] entries;
     long expireTime;
+    long crawlDelay = -1;                       // in milliseconds; -1 = not set
 
     /**
      */
@@ -125,6 +126,20 @@
     public long getExpireTime() {
       return expireTime;
     }
+    
+    /**
+     * Get the Crawl-Delay, in milliseconds; -1 if none was specified.
+     */
+    public long getCrawlDelay() {
+      return this.crawlDelay;
+    }
+    
+    /**
+     * Set the Crawl-Delay, in milliseconds.
+     */
+    public void setCrawlDelay(long crawlDelay) {
+      this.crawlDelay = crawlDelay;
+    }
 
     /** 
      *  Returns <code>false</code> if the <code>robots.txt</code> file
@@ -347,7 +362,21 @@
           if (addRules)
             currentRules.addPrefix(path, true);
         }
-      }
+      } else if ((line.length() >= 12)
+                 && (line.substring(0, 12).equalsIgnoreCase("Crawl-Delay:"))) {
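+        // A directive line means the User-agent lines of this block are done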
+        doneAgents= true;
+        long crawlDelay = -1;
+        String delay = line.substring("Crawl-Delay:".length()).trim();
+        if (delay.length() > 0) {
+          try {
+            crawlDelay = Long.parseLong(delay) * 1000; // seconds to milliseconds
+          } catch (NumberFormatException e) {
+            LOG.info("Cannot parse Crawl-Delay: " + e.toString());
+          }
+          currentRules.setCrawlDelay(crawlDelay);
+        }
+      }
     }
 
     if (currentPrecedence < bestPrecedenceSoFar) {
@@ -381,9 +410,10 @@
     return rules;
   }
   
-  public boolean isAllowed(HttpBase http, URL url)
-    throws ProtocolException, IOException {
-
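+  /**
+   * Get the robots.txt rules for the url's host, fetching and caching them on first use.
+   */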
+  private RobotRuleSet getRobotRulesSet(HttpBase http, URL url) {
     String host = url.getHost();
 
     RobotRuleSet robotRules = (RobotRuleSet)CACHE.get(host);
@@ -407,15 +437,30 @@
 
       CACHE.put(host, robotRules);                // cache rules for host
     }
+    return robotRules;
+  }
+  
+  public boolean isAllowed(HttpBase http, URL url)
+    throws ProtocolException, IOException {
 
     String path = url.getPath();                  // check rules
     if ((path == null) || "".equals(path)) {
       path= "/";
     }
 
-    return robotRules.isAllowed(path);
+    return getRobotRulesSet(http, url).isAllowed(path);
   }
 
+  
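+  /**
+   * Get the Crawl-Delay that robots.txt sets for the url's host, in milliseconds; -1 if none.
+   */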
+  public long getCrawlDelay(HttpBase http, URL url) 
+    throws ProtocolException, IOException {
+
+    return getRobotRulesSet(http, url).getCrawlDelay();
+  }
+
   private final static int BUFSIZE= 2048;
 
   /** command-line main for testing */
