Index: src/java/org/apache/nutch/crawl/CrawlDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReader.java	(revision 1669125)
+++ src/java/org/apache/nutch/crawl/CrawlDbReader.java	(working copy)
@@ -34,7 +34,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.FloatWritable;
@@ -60,6 +60,8 @@
 import org.apache.hadoop.mapred.lib.IdentityMapper;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.util.NutchJob;
 import org.apache.nutch.util.StringUtil;
@@ -70,13 +72,13 @@
  * @author Andrzej Bialecki
  *
  */
-public class CrawlDbReader implements Closeable {
+public class CrawlDbReader extends Configured implements Closeable, Tool {
 
   public static final Logger LOG = LoggerFactory.getLogger(CrawlDbReader.class);
 
   private MapFile.Reader[] readers = null;
 
-  private void openReaders(String crawlDb, Configuration config) throws IOException {
+  private void openReaders(String crawlDb, JobConf config) throws IOException {
     if (readers != null) return;
     FileSystem fs = FileSystem.get(config);
     readers = MapFileOutputFormat.getReaders(fs, new Path(crawlDb,
@@ -301,7 +303,7 @@
     closeReaders();
   }
 
-  public void processStatJob(String crawlDb, Configuration config, boolean sort) throws IOException {
+  public void processStatJob(String crawlDb, JobConf config, boolean sort) throws IOException {
 
     if (LOG.isInfoEnabled()) {
       LOG.info("CrawlDb statistics start: " + crawlDb);
@@ -388,7 +390,7 @@
 
   }
 
-  public CrawlDatum get(String crawlDb, String url, Configuration config) throws IOException {
+  public CrawlDatum get(String crawlDb, String url, JobConf config) throws IOException {
     Text key = new Text(url);
     CrawlDatum val = new CrawlDatum();
     openReaders(crawlDb, config);
@@ -397,7 +399,7 @@
     return res;
   }
 
-  public void readUrl(String crawlDb, String url, Configuration config) throws IOException {
+  public void readUrl(String crawlDb, String url, JobConf config) throws IOException {
     CrawlDatum res = get(crawlDb, url, config);
     System.out.println("URL: " + url);
     if (res != null) {
@@ -407,7 +409,7 @@
     }
   }
 
-  public void processDumpJob(String crawlDb, String output, Configuration config, String format, String regex, String status, Integer retry) throws IOException {
+  public void processDumpJob(String crawlDb, String output, JobConf config, String format, String regex, String status, Integer retry) throws IOException {
     if (LOG.isInfoEnabled()) {
       LOG.info("CrawlDb dump: starting");
       LOG.info("CrawlDb db: " + crawlDb);
@@ -484,7 +486,7 @@
     }
   }
 
-  public void processTopNJob(String crawlDb, long topN, float min, String output, Configuration config) throws IOException {
+  public void processTopNJob(String crawlDb, long topN, float min, String output, JobConf config) throws IOException {
 
     if (LOG.isInfoEnabled()) {
       LOG.info("CrawlDb topN: starting (topN=" + topN + ", min=" + min + ")");
@@ -539,7 +541,7 @@
 
   }
 
-  public static void main(String[] args) throws IOException {
+  public int run(String[] args) throws IOException {
     CrawlDbReader dbr = new CrawlDbReader();
 
     if (args.length < 2) {
@@ -558,11 +560,11 @@
       System.err.println("\t-topN <nnnn> <out_dir> [<min>]\tdump top <nnnn> urls sorted by score to <out_dir>");
       System.err.println("\t\t[<min>]\tskip records with scores below this value.");
       System.err.println("\t\t\tThis can significantly improve performance.");
-      return;
+      return -1;
     }
     String param = null;
     String crawlDb = args[0];
-    Configuration conf = NutchConfiguration.create();
+    JobConf job = new NutchJob(getConf());
     for (int i = 1; i < args.length; i++) {
       if (args[i].equals("-stats")) {
         boolean toSort = false;
@@ -570,7 +572,7 @@
           toSort = true;
           i++;
         }
-        dbr.processStatJob(crawlDb, conf, toSort);
+        dbr.processStatJob(crawlDb, job, toSort);
       } else if (args[i].equals("-dump")) {
         param = args[++i];
         String format = "normal";
@@ -595,10 +597,10 @@
             i=i+2;
           }
         }
-        dbr.processDumpJob(crawlDb, param, conf, format, regex, status, retry);
+        dbr.processDumpJob(crawlDb, param, job, format, regex, status, retry);
       } else if (args[i].equals("-url")) {
         param = args[++i];
-        dbr.readUrl(crawlDb, param, conf);
+        dbr.readUrl(crawlDb, param, job);
       } else if (args[i].equals("-topN")) {
         param = args[++i];
         long topN = Long.parseLong(param);
@@ -607,11 +609,18 @@
         if (i < args.length - 1) {
           min = Float.parseFloat(args[++i]);
         }
-        dbr.processTopNJob(crawlDb, topN, min, param, conf);
+        dbr.processTopNJob(crawlDb, topN, min, param, job);
       } else {
         System.err.println("\nError: wrong argument " + args[i]);
+        return -1;
       }
     }
-    return;
+    return 0;
   }
-}
+
+  public static void main(String[] args) throws Exception {
+    int result = ToolRunner.run(NutchConfiguration.create(),
+        new CrawlDbReader(), args);
+    System.exit(result);
+  }
+}
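
Note: the change above is the standard Hadoop Tool/ToolRunner conversion.
For reference, a minimal sketch of the pattern (the class name "MyTool" is
hypothetical; Tool, Configured and ToolRunner are the actual Hadoop APIs):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.conf.Configured;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;

  public class MyTool extends Configured implements Tool {
    public int run(String[] args) throws Exception {
      // getConf() already carries any -D key=value generic options,
      // because ToolRunner applies GenericOptionsParser before run().
      Configuration conf = getConf();
      System.out.println("remaining args: " + args.length);
      return 0;
    }

    public static void main(String[] args) throws Exception {
      // ToolRunner strips the generic options (-D, -conf, -fs, -jt)
      // and passes only the remaining arguments to run().
      System.exit(ToolRunner.run(new Configuration(), new MyTool(), args));
    }
  }

One practical effect of this patch is that "bin/nutch readdb" should now
accept Hadoop generic options (e.g. -D property=value) ahead of the
tool-specific arguments, since run() only sees what GenericOptionsParser
leaves behind.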
Index: src/java/org/apache/nutch/tools/Benchmark.java
===================================================================
--- src/java/org/apache/nutch/tools/Benchmark.java	(revision 1669125)
+++ src/java/org/apache/nutch/tools/Benchmark.java	(working copy)
@@ -261,7 +261,7 @@
     if (LOG.isInfoEnabled()) { LOG.info("crawl finished: " + dir); }
     res.elapsed = System.currentTimeMillis() - res.elapsed;
     CrawlDbReader dbreader = new CrawlDbReader();
-    dbreader.processStatJob(crawlDb.toString(), conf, false);
+    dbreader.processStatJob(crawlDb.toString(), job, false);
     return res;
   }
 

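
Note: processStatJob() (and the other public read methods) now take a
JobConf rather than a Configuration, so external callers have to wrap
their configuration the same way Benchmark does here. A minimal sketch,
assuming an illustrative crawldb path ("StatJobCaller" is a hypothetical
caller, not part of the patch):

  import org.apache.hadoop.mapred.JobConf;
  import org.apache.nutch.crawl.CrawlDbReader;
  import org.apache.nutch.util.NutchConfiguration;
  import org.apache.nutch.util.NutchJob;

  public class StatJobCaller {
    public static void main(String[] args) throws Exception {
      // NutchJob extends JobConf and copies the supplied Configuration.
      JobConf job = new NutchJob(NutchConfiguration.create());
      CrawlDbReader reader = new CrawlDbReader();
      try {
        reader.processStatJob("crawl/crawldb", job, false); // sort = false
      } finally {
        reader.close(); // CrawlDbReader is Closeable; frees MapFile readers
      }
    }
  }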