Index: src/java/org/apache/nutch/crawl/CrawlDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReader.java	(revision 1627787)
+++ src/java/org/apache/nutch/crawl/CrawlDbReader.java	(working copy)
@@ -100,7 +100,7 @@
       public LineRecordWriter(DataOutputStream out) {
         this.out = out;
         try {
-          out.writeBytes("Url;Status code;Status name;Fetch Time;Modified Time;Retries since fetch;Retry interval seconds;Retry interval days;Score;Signature;Metadata\n");
+          out.writeBytes("Url,Status code,Status name,Fetch Time,Modified Time,Retries since fetch,Retry interval seconds,Retry interval days,Score,Signature,Metadata\n");
         } catch (IOException e) {}
       }
 
@@ -108,29 +108,29 @@
           out.writeByte('"');
           out.writeBytes(key.toString());
           out.writeByte('"');
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(Integer.toString(value.getStatus()));
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeByte('"');
           out.writeBytes(CrawlDatum.getStatusName(value.getStatus()));
           out.writeByte('"');
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(new Date(value.getFetchTime()).toString());
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(new Date(value.getModifiedTime()).toString());
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(Integer.toString(value.getRetriesSinceFetch()));
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(Float.toString(value.getFetchInterval()));
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(Float.toString((value.getFetchInterval() / FetchSchedule.SECONDS_PER_DAY)));
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeBytes(Float.toString(value.getScore()));
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeByte('"');
           out.writeBytes(value.getSignature() != null ? StringUtil.toHexString(value.getSignature()): "null");
           out.writeByte('"');
-          out.writeByte(';');
+          out.writeByte(',');
           out.writeByte('"');
           if (value.getMetaData() != null) {
             for (Entry<Writable, Writable> e : value.getMetaData().entrySet()) {
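The writer above wraps the URL, status name, signature, and metadata fields in
double quotes but does not escape quotes embedded in the values themselves, so
a URL containing a '"' would still break the record. A minimal, self-contained
sketch of RFC 4180-style quoting that would close that gap; csvQuote is a
hypothetical helper, not an existing Nutch method:

// Sketch only, not part of the patch.
public class CsvQuoteSketch {
  /** Wrap a field in double quotes and double any embedded quotes,
   *  so commas or quotes inside a value cannot split the record. */
  static String csvQuote(String field) {
    return "\"" + field.replace("\"", "\"\"") + "\"";
  }

  public static void main(String[] args) {
    // A value containing both a comma and a double quote survives intact:
    System.out.println(csvQuote("http://example.com/?q=\"a,b\""));
    // prints: "http://example.com/?q=""a,b"""
  }
}
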
@@ -509,8 +509,7 @@
     job.setOutputKeyClass(FloatWritable.class);
     job.setOutputValueClass(Text.class);
 
-    // XXX hmmm, no setFloat() in the API ... :(
-    job.setLong("db.reader.topn.min", Math.round(1000000.0 * min));
+    job.setFloat("db.reader.topn.min", min);
     JobClient.runJob(job);
 
     if (LOG.isInfoEnabled()) {
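With setFloat available, the scale-by-1,000,000-and-round workaround for
setLong is no longer needed, and the consumer of "db.reader.topn.min" (the
TopN mapper, not shown in this diff) has to switch from getLong to getFloat to
stay symmetric with the line above. A minimal sketch of the expected pairing,
using Hadoop's Configuration API:

import org.apache.hadoop.conf.Configuration;

// Sketch only: producer/consumer symmetry for the job property.
public class TopNMinSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setFloat("db.reader.topn.min", 0.5f);           // writer side (this patch)
    float min = conf.getFloat("db.reader.topn.min", 0f); // reader side (mapper)
    System.out.println(min);                             // 0.5
  }
}
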
@@ -540,6 +539,7 @@
   }
 
   public static void main(String[] args) throws IOException {
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
 
     if (args.length < 2) {
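
The @SuppressWarnings("resource") above silences the unclosed-resource warning
on dbr rather than closing it. An alternative worth noting is
try-with-resources, which closes the reader deterministically. A
self-contained sketch, with StubReader standing in for CrawlDbReader (assumed
Closeable, as the suppressed warning implies):

import java.io.Closeable;

public class ResourceSketch {
  // StubReader is a stand-in; CrawlDbReader's own methods are not modeled here.
  static class StubReader implements Closeable {
    void process(String[] args) { /* dispatch on args, as main() does */ }
    @Override public void close() { /* release any open readers */ }
  }

  public static void main(String[] args) {
    try (StubReader dbr = new StubReader()) {
      dbr.process(args); // closed even if processing throws
    }
  }
}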
