Index: conf/nutch-default.xml
===================================================================
--- conf/nutch-default.xml	(revision 1296763)
+++ conf/nutch-default.xml	(working copy)
@@ -720,6 +720,14 @@
 </property>
 
 <property>
+  <name>fetcher.max.exceptions.per.queue.percent</name>
+  <value>0.50</value>
+  <description>The maximum fraction (0.0-1.0) of protocol-level exceptions (e.g. timeouts)
+  per requests sent for a host (or IP) queue. Takes precedence over fetcher.max.exceptions.per.queue; -1.0 disables the ratio check.
+  </description>
+</property>
+
+<property>
   <name>fetcher.throughput.threshold.pages</name>
   <value>-1</value>
   <description>The threshold of minimum pages per second. If the fetcher downloads less
Index: src/java/org/apache/nutch/fetcher/Fetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/Fetcher.java	(revision 1296763)
+++ src/java/org/apache/nutch/fetcher/Fetcher.java	(working copy)
@@ -228,6 +228,7 @@
     Set<FetchItem>  inProgress = Collections.synchronizedSet(new HashSet<FetchItem>());
     AtomicLong nextFetchTime = new AtomicLong();
     AtomicInteger exceptionCounter = new AtomicInteger();
+    AtomicInteger sentCounter = new AtomicInteger();
     long crawlDelay;
     long minCrawlDelay;
     int maxThreads;
@@ -259,7 +260,12 @@
     public int incrementExceptionCounter() {
       return exceptionCounter.incrementAndGet();
     }
-
+    public void incrementSentCounter() {
+      sentCounter.incrementAndGet();
+    }
+    public int getSentCounter() {
+      return sentCounter.get();
+    }
     public void finishFetchItem(FetchItem it, boolean asap) {
       if (it != null) {
         inProgress.remove(it);
@@ -330,6 +336,7 @@
     long minCrawlDelay;
     long timelimit = -1;
     int maxExceptionsPerQueue = -1;
+    float maxExceptionsPerQueuePercent = -1.0f;
     Configuration conf;
 
     public static final String QUEUE_MODE_HOST = "byHost";
@@ -354,6 +361,7 @@
       this.minCrawlDelay = (long) (conf.getFloat("fetcher.server.min.delay", 0.0f) * 1000);
       this.timelimit = conf.getLong("fetcher.timelimit", -1);
       this.maxExceptionsPerQueue = conf.getInt("fetcher.max.exceptions.per.queue", -1);
+      this.maxExceptionsPerQueuePercent = conf.getFloat("fetcher.max.exceptions.per.queue.percent", -1.0f);
     }
 
     public int getTotalSize() {
@@ -458,6 +466,18 @@
      * @param queueid
      * @return number of purged items
      */
+
+    // Count a request sent for this queue; used as the denominator in checkExceptionThreshold.
+    public synchronized void incrementSentCounter(String queueid) {
+      FetchItemQueue fiq = queues.get(queueid);
+      if (fiq == null) {
+        return;
+      }
+      if (fiq.getQueueSize() == 0) {
+        return;
+      }
+      fiq.incrementSentCounter();
+    }
     public synchronized int checkExceptionThreshold(String queueid) {
       FetchItemQueue fiq = queues.get(queueid);
       if (fiq == null) {
@@ -467,8 +487,10 @@
         return 0;
       }
       int excCount = fiq.incrementExceptionCounter();
-      if (maxExceptionsPerQueue!= -1 && excCount >= maxExceptionsPerQueue) {
-        // too many exceptions for items in this queue - purge it
+      // Exception ratio per sent request; guard against a zero denominator (no requests counted yet).
+      float excThreshold = fiq.getSentCounter() > 0 ? (float) excCount / fiq.getSentCounter() : 0.0f;
+      if ((maxExceptionsPerQueuePercent != -1.0f && excThreshold >= maxExceptionsPerQueuePercent)
+          || (maxExceptionsPerQueuePercent == -1.0f && maxExceptionsPerQueue != -1 && excCount >= maxExceptionsPerQueue)) {
         int deleted = fiq.emptyQueue();
         LOG.info("* queue: " + queueid + " >> removed " + deleted
             + " URLs from queue because " + excCount + " exceptions occurred");
@@ -689,6 +711,7 @@
                   fiq.crawlDelay = rules.getCrawlDelay();
                 }
               }
+              fetchQueues.incrementSentCounter(fit.getQueueID());
               ProtocolOutput output = protocol.getProtocolOutput(fit.url, fit.datum);
               ProtocolStatus status = output.getStatus();
               Content content = output.getContent();
