Index: src/java/org/apache/nutch/crawl/CrawlDb.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDb.java	(revision 553191)
+++ src/java/org/apache/nutch/crawl/CrawlDb.java	(working copy)
@@ -75,6 +75,7 @@
       LOG.info("CrawlDb update: URL filtering: " + filter);
     }
 
+    boolean update = false;
     JobConf job = CrawlDb.createJob(getConf(), crawlDb);
     job.setBoolean(CRAWLDB_ADDITIONS_ALLOWED, additionsAllowed);
     job.setBoolean(CrawlDbFilter.URL_FILTERING, filter);
@@ -85,6 +86,7 @@
       if (fs.exists(fetch) && fs.exists(parse)) {
         job.addInputPath(fetch);
         job.addInputPath(parse);
+        update= true ;
       } else {
         LOG.info(" - skipping invalid segment " + segments[i]);
       }
@@ -89,20 +91,26 @@
         LOG.info(" - skipping invalid segment " + segments[i]);
       }
     }
+    
+    // Only run the merge job if at least one valid segment was added as input.
+    if(update){
+    	if (LOG.isInfoEnabled()) {
+    		LOG.info("CrawlDb update: Merging segment data into db.");
+    	}
+    	try {
+    		JobClient.runJob(job);
+    	} catch (IOException e) {
+    		LockUtil.removeLockFile(fs, lock);
+    		if (fs.exists(job.getOutputPath())) fs.delete(job.getOutputPath());
+    		throw e;
+    	}
 
-    if (LOG.isInfoEnabled()) {
-      LOG.info("CrawlDb update: Merging segment data into db.");
+    	CrawlDb.install(job, crawlDb);
+    	if (LOG.isInfoEnabled()) { LOG.info("CrawlDb update: done"); }
+    }else{     
+    	if (LOG.isInfoEnabled()) { LOG.info("CrawlDb: nothing to update"); } 
+    	LockUtil.removeLockFile(fs, lock);
     }
-    try {
-      JobClient.runJob(job);
-    } catch (IOException e) {
-      LockUtil.removeLockFile(fs, lock);
-      if (fs.exists(job.getOutputPath())) fs.delete(job.getOutputPath());
-      throw e;
-    }
-
-    CrawlDb.install(job, crawlDb);
-    if (LOG.isInfoEnabled()) { LOG.info("CrawlDb update: done"); }
   }
 
   public static JobConf createJob(Configuration config, Path crawlDb)

