Index: src/test/org/apache/nutch/crawl/CrawlDBTestUtil.java
===================================================================
--- src/test/org/apache/nutch/crawl/CrawlDBTestUtil.java	(revision 1169488)
+++ src/test/org/apache/nutch/crawl/CrawlDBTestUtil.java	(working copy)
@@ -21,8 +21,8 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -38,7 +38,7 @@
 
 public class CrawlDBTestUtil {
 
-  private static final Log LOG = LogFactory.getLog(CrawlDBTestUtil.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CrawlDBTestUtil.class);
 
   /**
    * Creates synthetic crawldb
Index: src/java/org/apache/nutch/fetcher/OldFetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/OldFetcher.java	(revision 1169488)
+++ src/java/org/apache/nutch/fetcher/OldFetcher.java	(working copy)
@@ -23,8 +23,8 @@
 import java.util.Map.Entry;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -47,7 +47,7 @@
 /** The fetcher. Most of the work is done by plugins. */
 public class OldFetcher extends Configured implements Tool, MapRunnable<WritableComparable, Writable, Text, NutchWritable> { 
 
-  public static final Log LOG = LogFactory.getLog(OldFetcher.class);
+  public static final Logger LOG = LoggerFactory.getLogger(OldFetcher.class);
   
   public static final int PERM_REFRESH_TIME = 5;
 
@@ -130,9 +130,9 @@
               break;                              // at eof, exit
             }
           } catch (IOException e) {
-            if (LOG.isFatalEnabled()) {
-              e.printStackTrace(LogUtil.getFatalStream(LOG));
-              LOG.fatal("fetcher caught:"+e.toString());
+            if (LOG.isErrorEnabled()) {
+              e.printStackTrace(LogUtil.getErrorStream(LOG));
+              LOG.error("fetcher caught:"+e.toString());
             }
             break;
           }
@@ -253,9 +253,9 @@
         }
 
       } catch (Throwable e) {
-        if (LOG.isFatalEnabled()) {
-          e.printStackTrace(LogUtil.getFatalStream(LOG));
-          LOG.fatal("fetcher caught:"+e.toString());
+        if (LOG.isErrorEnabled()) {
+          e.printStackTrace(LogUtil.getErrorStream(LOG));
+          LOG.error("fetcher caught:"+e.toString());
         }
       } finally {
         synchronized (OldFetcher.this) {activeThreads--;} // count threads
@@ -397,9 +397,9 @@
           }
         }
       } catch (IOException e) {
-        if (LOG.isFatalEnabled()) {
-          e.printStackTrace(LogUtil.getFatalStream(LOG));
-          LOG.fatal("fetcher caught:"+e.toString());
+        if (LOG.isErrorEnabled()) {
+          e.printStackTrace(LogUtil.getErrorStream(LOG));
+          LOG.error("fetcher caught:"+e.toString());
         }
       }
 
@@ -570,7 +570,7 @@
       fetch(segment, threads);              // run the Fetcher
       return 0;
     } catch (Exception e) {
-      LOG.fatal("OldFetcher: " + StringUtils.stringifyException(e));
+      LOG.error("OldFetcher: " + StringUtils.stringifyException(e));
       return -1;
     }
 
Index: src/java/org/apache/nutch/fetcher/Fetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/Fetcher.java	(revision 1169488)
+++ src/java/org/apache/nutch/fetcher/Fetcher.java	(working copy)
@@ -28,8 +28,8 @@
 import java.util.concurrent.atomic.AtomicLong;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -96,7 +96,7 @@
 
   public static final String PROTOCOL_REDIR = "protocol";
 
-  public static final Log LOG = LogFactory.getLog(Fetcher.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Fetcher.class);
   
   public static class InputFormat extends SequenceFileInputFormat<Text, CrawlDatum> {
     /** Don't split inputs, to keep things polite. */
@@ -507,7 +507,7 @@
             hasMore = reader.next(url, datum);
             timelimitcount++;
           } catch (IOException e) {
-            LOG.fatal("QueueFeeder error reading input, record " + cnt, e);
+            LOG.error("QueueFeeder error reading input, record " + cnt, e);
             return;
           }
           continue;
@@ -532,7 +532,7 @@
                 feed--;
               }
             } catch (IOException e) {
-              LOG.fatal("QueueFeeder error reading input, record " + cnt, e);
+              LOG.error("QueueFeeder error reading input, record " + cnt, e);
               return;
             }
           }
@@ -796,9 +796,9 @@
         }
 
       } catch (Throwable e) {
-        if (LOG.isFatalEnabled()) {
-          e.printStackTrace(LogUtil.getFatalStream(LOG));
-          LOG.fatal("fetcher caught:"+e.toString());
+        if (LOG.isErrorEnabled()) {
+          e.printStackTrace(LogUtil.getErrorStream(LOG));
+          LOG.error("fetcher caught:"+e.toString());
         }
       } finally {
         if (fit != null) fetchQueues.finishFetchItem(fit);
@@ -964,9 +964,9 @@
           }
         }
       } catch (IOException e) {
-        if (LOG.isFatalEnabled()) {
-          e.printStackTrace(LogUtil.getFatalStream(LOG));
-          LOG.fatal("fetcher caught:"+e.toString());
+        if (LOG.isErrorEnabled()) {
+          e.printStackTrace(LogUtil.getErrorStream(LOG));
+          LOG.error("fetcher caught:"+e.toString());
         }
       }
 
@@ -1176,7 +1176,7 @@
       fetch(segment, threads, parsing);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("Fetcher: " + StringUtils.stringifyException(e));
+      LOG.error("Fetcher: " + StringUtils.stringifyException(e));
       return -1;
     }
 
@@ -1191,8 +1191,8 @@
     if (agentName == null || agentName.trim().length() == 0) {
       String message = "Fetcher: No agents listed in 'http.agent.name'"
           + " property.";
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal(message);
+      if (LOG.isErrorEnabled()) {
+        LOG.error(message);
       }
       throw new IllegalArgumentException(message);
     } else {
Index: src/java/org/apache/nutch/tools/proxy/SegmentHandler.java
===================================================================
--- src/java/org/apache/nutch/tools/proxy/SegmentHandler.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/proxy/SegmentHandler.java	(working copy)
@@ -25,8 +25,8 @@
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -52,7 +52,7 @@
  * XXX should turn this into a plugin?
  */
 public class SegmentHandler extends AbstractTestbedHandler {
-  private static final Log LOG = LogFactory.getLog(SegmentHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SegmentHandler.class);
   private Segment seg;
   
   private static HashMap<Integer,Integer> protoCodes = new HashMap<Integer,Integer>();
Index: src/java/org/apache/nutch/tools/proxy/LogDebugHandler.java
===================================================================
--- src/java/org/apache/nutch/tools/proxy/LogDebugHandler.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/proxy/LogDebugHandler.java	(working copy)
@@ -26,12 +26,12 @@
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.mortbay.jetty.Request;
 
 public class LogDebugHandler extends AbstractTestbedHandler implements Filter {
-  private static final Log LOG = LogFactory.getLog(LogDebugHandler.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LogDebugHandler.class);
 
   @Override
   public void handle(Request req, HttpServletResponse res, String target,
Index: src/java/org/apache/nutch/tools/proxy/TestbedProxy.java
===================================================================
--- src/java/org/apache/nutch/tools/proxy/TestbedProxy.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/proxy/TestbedProxy.java	(working copy)
@@ -20,8 +20,8 @@
 import java.util.HashSet;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -37,7 +37,7 @@
 import org.mortbay.proxy.AsyncProxyServlet;
 
 public class TestbedProxy {
-  private static final Log LOG = LogFactory.getLog(TestbedProxy.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestbedProxy.class);
 
   /**
    * @param args
@@ -84,7 +84,7 @@
       } else if (args[i].equals("-seg")) {
         segs.add(new Path(args[++i]));
       } else {
-        LOG.fatal("Unknown argument: " + args[i]);
+        LOG.error("Unknown argument: " + args[i]);
         System.exit(-1);
       }
     }
Index: src/java/org/apache/nutch/tools/arc/ArcRecordReader.java
===================================================================
--- src/java/org/apache/nutch/tools/arc/ArcRecordReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/arc/ArcRecordReader.java	(working copy)
@@ -20,8 +20,8 @@
 import java.io.IOException;
 import java.util.zip.GZIPInputStream;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -50,7 +50,7 @@
 public class ArcRecordReader
   implements RecordReader<Text, BytesWritable> {
 
-  public static final Log LOG = LogFactory.getLog(ArcRecordReader.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ArcRecordReader.class);
 
   protected Configuration conf;
   protected long splitStart = 0;
Index: src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
===================================================================
--- src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java	(working copy)
@@ -21,8 +21,8 @@
 import java.util.Date;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -73,7 +73,7 @@
   extends Configured
   implements Tool, Mapper<Text, BytesWritable, Text, NutchWritable> {
 
-  public static final Log LOG = LogFactory.getLog(ArcSegmentCreator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ArcSegmentCreator.class);
   public static final String URL_VERSION = "arc.url.version";
   private JobConf jobConf;
   private URLFilters urlFilters;
@@ -233,8 +233,8 @@
         }
       }
       catch (IOException e) {
-        if (LOG.isFatalEnabled()) {
-          LOG.fatal("ArcSegmentCreator caught:" + StringUtils.stringifyException(e));
+        if (LOG.isErrorEnabled()) {
+          LOG.error("ArcSegmentCreator caught:" + StringUtils.stringifyException(e));
         }
       }
 
@@ -398,7 +398,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("ArcSegmentCreator: " + StringUtils.stringifyException(e));
+      LOG.error("ArcSegmentCreator: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/tools/ResolveUrls.java
===================================================================
--- src/java/org/apache/nutch/tools/ResolveUrls.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/ResolveUrls.java	(working copy)
@@ -33,8 +33,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.util.URLUtil;
 
@@ -46,7 +46,7 @@
  */
 public class ResolveUrls {
 
-  public static final Log LOG = LogFactory.getLog(ResolveUrls.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ResolveUrls.class);
 
   private String urlsFile = null;
   private int numThreads = 100;
@@ -193,7 +193,7 @@
       resolve.resolveUrls();
     }
     catch (Exception e) {
-      LOG.fatal("ResolveUrls: " + StringUtils.stringifyException(e));
+      LOG.error("ResolveUrls: " + StringUtils.stringifyException(e));
     }
   }
 
Index: src/java/org/apache/nutch/tools/DmozParser.java
===================================================================
--- src/java/org/apache/nutch/tools/DmozParser.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/DmozParser.java	(working copy)
@@ -27,8 +27,8 @@
 import org.apache.xerces.util.XMLChar;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -39,7 +39,7 @@
 
 /** Utility that converts DMOZ RDF into a flat file of URLs to be injected. */
 public class DmozParser {
-  public static final Log LOG = LogFactory.getLog(DmozParser.class);
+  public static final Logger LOG = LoggerFactory.getLogger(DmozParser.class);
   
     long pages = 0;
 
@@ -237,21 +237,22 @@
      * Emit the exception message
      */
     public void error(SAXParseException spe) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal("Error: " + spe.toString() + ": " + spe.getMessage());
-        spe.printStackTrace(LogUtil.getFatalStream(LOG));
+      if (LOG.isErrorEnabled()) {
+        LOG.error("Error: " + spe.toString() + ": " + spe.getMessage());
+        spe.printStackTrace(LogUtil.getErrorStream(LOG));
       }
     }
 
     /**
      * Emit the exception message, with line numbers
      */
-    public void fatalError(SAXParseException spe) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal("Fatal err: " + spe.toString() + ": " + spe.getMessage());
-        LOG.fatal("Last known line is " + location.getLineNumber() +
+    public void fatalError(SAXParseException spe) {
+      if (LOG.isErrorEnabled()) {
+        LOG.error("Fatal err: " + spe.toString() + ": " + spe.getMessage());
+        LOG.error("Last known line is " + location.getLineNumber() +
                   ", column " + location.getColumnNumber());
-        spe.printStackTrace(LogUtil.getFatalStream(LOG));
+        spe.printStackTrace(LogUtil.getErrorStream(LOG));
       }
     }
         
@@ -299,9 +299,9 @@
       InputSource is = new InputSource(in);
       reader.parse(is);
     } catch (Exception e) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal(e.toString());
-        e.printStackTrace(LogUtil.getFatalStream(LOG));
+      if (LOG.isErrorEnabled()) {
+        LOG.error(e.toString());
+        e.printStackTrace(LogUtil.getErrorStream(LOG));
       }
       System.exit(0);
     } finally {
@@ -321,9 +321,9 @@
       }
     } 
     catch (Exception e) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal(e.toString());
-        e.printStackTrace(LogUtil.getFatalStream(LOG));
+      if (LOG.isErrorEnabled()) {
+        LOG.error(e.toString());
+        e.printStackTrace(LogUtil.getErrorStream(LOG));
       }
       System.exit(0);
     } finally {
Index: src/java/org/apache/nutch/tools/CrawlDBScanner.java
===================================================================
--- src/java/org/apache/nutch/tools/CrawlDBScanner.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/CrawlDBScanner.java	(working copy)
@@ -20,8 +20,8 @@
 import java.text.SimpleDateFormat;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -59,7 +59,7 @@
 public class CrawlDBScanner extends Configured implements Tool,
     Mapper<Text,CrawlDatum,Text,CrawlDatum>, Reducer<Text,CrawlDatum,Text,CrawlDatum> {
 
-  public static final Log LOG = LogFactory.getLog(CrawlDBScanner.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CrawlDBScanner.class);
 
   public CrawlDBScanner() {}
 
@@ -182,7 +182,7 @@
       scan(dbDir, output, args[2], status, text);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("CrawlDBScanner: " + StringUtils.stringifyException(e));
+      LOG.error("CrawlDBScanner: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/tools/FreeGenerator.java
===================================================================
--- src/java/org/apache/nutch/tools/FreeGenerator.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/FreeGenerator.java	(working copy)
@@ -23,8 +23,8 @@
 import java.util.Iterator;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -61,7 +61,7 @@
  * @author Andrzej Bialecki
  */
 public class FreeGenerator extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(FreeGenerator.class);
+  private static final Logger LOG = LoggerFactory.getLogger(FreeGenerator.class);
   
   private static final String FILTER_KEY = "free.generator.filter";
   private static final String NORMALIZE_KEY = "free.generator.normalize";
@@ -153,7 +153,7 @@
         } else if (args[i].equals("-normalize")) {
           normalize = true;
         } else {
-          LOG.fatal("Unknown argument: " + args[i] + ", exiting ...");
+          LOG.error("Unknown argument: " + args[i] + ", exiting ...");
           return -1;
         }
       }
@@ -184,7 +184,7 @@
     try {
       JobClient.runJob(job);
     } catch (Exception e) {
-      LOG.fatal("FAILED: " + StringUtils.stringifyException(e));
+      LOG.error("FAILED: " + StringUtils.stringifyException(e));
       return -1;
     }
     long end = System.currentTimeMillis();
Index: src/java/org/apache/nutch/tools/Benchmark.java
===================================================================
--- src/java/org/apache/nutch/tools/Benchmark.java	(revision 1169488)
+++ src/java/org/apache/nutch/tools/Benchmark.java	(working copy)
@@ -1,268 +1,268 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nutch.tools;
-
-import java.io.OutputStream;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.nutch.crawl.CrawlDb;
-import org.apache.nutch.crawl.CrawlDbReader;
-import org.apache.nutch.crawl.Generator;
-import org.apache.nutch.crawl.Injector;
-import org.apache.nutch.crawl.LinkDb;
-import org.apache.nutch.fetcher.Fetcher;
-import org.apache.nutch.parse.ParseSegment;
-import org.apache.nutch.util.NutchConfiguration;
-import org.apache.nutch.util.NutchJob;
-
-public class Benchmark extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(Benchmark.class);
-
-  public static void main(String[] args) throws Exception {
-    Configuration conf = NutchConfiguration.create();
-    int res = ToolRunner.run(conf, new Benchmark(), args);
-    System.exit(res);
-  }
-  
-  private static String getDate() {
-    return new SimpleDateFormat("yyyyMMddHHmmss").format
-      (new Date(System.currentTimeMillis()));
-  }
- 
-  private void createSeeds(FileSystem fs, Path seedsDir, int count) throws Exception {
-    OutputStream os = fs.create(new Path(seedsDir, "seeds"));
-    for (int i = 0; i < count; i++) {
-      String url = "http://www.test-" + i + ".com/\r\n";
-      os.write(url.getBytes());
-    }
-    os.flush();
-    os.close();
-  }
-  
-  public static final class BenchmarkResults {
-    Map<String,Map<String,Long>> timings = new HashMap<String,Map<String,Long>>();
-    List<String> runs = new ArrayList<String>();
-    List<String> stages = new ArrayList<String>();
-    int seeds, depth, threads;
-    boolean delete;
-    long topN;
-    long elapsed;
-    String plugins;
-    
-    public void addTiming(String stage, String run, long timing) {
-      if (!runs.contains(run)) {
-        runs.add(run);
-      }
-      if (!stages.contains(stage)) {
-        stages.add(stage);
-      }
-      Map<String,Long> t = timings.get(stage);
-      if (t == null) {
-        t = new HashMap<String,Long>();
-        timings.put(stage, t);
-      }
-      t.put(run, timing);
-    }
-    
-    public String toString() {
-      StringBuilder sb = new StringBuilder();
-      sb.append("* Plugins:\t" + plugins + "\n");
-      sb.append("* Seeds:\t" + seeds + "\n");
-      sb.append("* Depth:\t" + depth + "\n");
-      sb.append("* Threads:\t" + threads + "\n");
-      sb.append("* TopN:\t" + topN + "\n");
-      sb.append("* Delete:\t" + delete + "\n");
-      sb.append("* TOTAL ELAPSED:\t" + elapsed + "\n");
-      for (String stage : stages) {
-        Map<String,Long> timing = timings.get(stage);
-        if (timing == null) continue;
-        sb.append("- stage: " + stage + "\n");
-        for (String r : runs) {
-          Long Time = timing.get(r);
-          if (Time == null) {
-            continue;
-          }
-          sb.append("\trun " + r + "\t" + Time + "\n");
-        }
-      }
-      return sb.toString();
-    }
-    
-    public List<String> getStages() {
-      return stages;
-    }
-    public List<String> getRuns() {
-      return runs;
-    }
-  }
-  
-  public int run(String[] args) throws Exception {
-    String plugins = "protocol-http|parse-tika|scoring-opic|urlfilter-regex|urlnormalizer-pass";
-    int seeds = 1;
-    int depth = 10;
-    int threads = 10;
-    boolean delete = true;
-    long topN = Long.MAX_VALUE;
-    
-    if (args.length == 0) {
-      System.err.println("Usage: Benchmark [-seeds NN] [-depth NN] [-threads NN] [-keep] [-maxPerHost NN] [-plugins <regex>]");
-      System.err.println("\t-seeds NN\tcreate NN unique hosts in a seed list (default: 1)");
-      System.err.println("\t-depth NN\tperform NN crawl cycles (default: 10)");
-      System.err.println("\t-threads NN\tuse NN threads per Fetcher task (default: 10)");
-      System.err.println("\t-keep\tkeep segment data (default: delete after updatedb)");
-      System.err.println("\t-plugins <regex>\toverride 'plugin.includes'.");
-      System.err.println("\tNOTE: if not specified, this is reset to: " + plugins);
-      System.err.println("\tNOTE: if 'default' is specified then a value set in nutch-default/nutch-site is used.");
-      System.err.println("\t-maxPerHost NN\tmax. # of URLs per host in a fetchlist");
-      return -1;
-    }
-    int maxPerHost = Integer.MAX_VALUE;
-    for (int i = 0; i < args.length; i++) {
-      if (args[i].equals("-seeds")) {
-        seeds = Integer.parseInt(args[++i]);
-      } else if (args[i].equals("-threads")) {
-        threads = Integer.parseInt(args[++i]);
-      } else if (args[i].equals("-depth")) {
-        depth = Integer.parseInt(args[++i]);
-      } else if (args[i].equals("-keep")) {
-        delete = false;
-      } else if (args[i].equals("-plugins")) {
-        plugins = args[++i];
-      } else if (args[i].equalsIgnoreCase("-maxPerHost")) {
-        maxPerHost = Integer.parseInt(args[++i]);
-      } else {
-        LOG.fatal("Invalid argument: '" + args[i] + "'");
-        return -1;
-      }
-    }
-    BenchmarkResults res = benchmark(seeds, depth, threads, maxPerHost, topN, delete, plugins);
-    System.out.println(res);
-    return 0;
-  }
-  
-  public BenchmarkResults benchmark(int seeds, int depth, int threads, int maxPerHost,
-        long topN, boolean delete, String plugins) throws Exception {
-    Configuration conf = getConf();
-    conf.set("http.proxy.host", "localhost");
-    conf.setInt("http.proxy.port", 8181);
-    conf.set("http.agent.name", "test");
-    conf.set("http.robots.agents", "test,*");
-    if (!plugins.equals("default")) {
-      conf.set("plugin.includes", plugins);
-    }
-    conf.setInt(Generator.GENERATOR_MAX_COUNT, maxPerHost);
-    conf.set(Generator.GENERATOR_COUNT_MODE, Generator.GENERATOR_COUNT_VALUE_HOST);
-    JobConf job = new NutchJob(getConf());    
-    FileSystem fs = FileSystem.get(job);
-    Path dir = new Path(getConf().get("hadoop.tmp.dir"),
-            "bench-" + System.currentTimeMillis());
-    fs.mkdirs(dir);
-    Path rootUrlDir = new Path(dir, "seed");
-    fs.mkdirs(rootUrlDir);
-    createSeeds(fs, rootUrlDir, seeds);
-
-    if (LOG.isInfoEnabled()) {
-      LOG.info("crawl started in: " + dir);
-      LOG.info("rootUrlDir = " + rootUrlDir);
-      LOG.info("threads = " + threads);
-      LOG.info("depth = " + depth);      
-    }
-    BenchmarkResults res = new BenchmarkResults();
-    res.delete = delete;
-    res.depth = depth;
-    res.plugins = plugins;
-    res.seeds = seeds;
-    res.threads = threads;
-    res.topN = topN;
-    Path crawlDb = new Path(dir + "/crawldb");
-    Path linkDb = new Path(dir + "/linkdb");
-    Path segments = new Path(dir + "/segments");
-    res.elapsed = System.currentTimeMillis();
-    Injector injector = new Injector(getConf());
-    Generator generator = new Generator(getConf());
-    Fetcher fetcher = new Fetcher(getConf());
-    ParseSegment parseSegment = new ParseSegment(getConf());
-    CrawlDb crawlDbTool = new CrawlDb(getConf());
-    LinkDb linkDbTool = new LinkDb(getConf());
-      
-    // initialize crawlDb
-    long start = System.currentTimeMillis();
-    injector.inject(crawlDb, rootUrlDir);
-    long delta = System.currentTimeMillis() - start;
-    res.addTiming("inject", "0", delta);
-    int i;
-    for (i = 0; i < depth; i++) {             // generate new segment
-      start = System.currentTimeMillis();
-      Path[] segs = generator.generate(crawlDb, segments, -1, topN, System
-          .currentTimeMillis());
-      delta = System.currentTimeMillis() - start;
-      res.addTiming("generate", i + "", delta);
-      if (segs == null) {
-        LOG.info("Stopping at depth=" + i + " - no more URLs to fetch.");
-        break;
-      }
-      start = System.currentTimeMillis();
-      fetcher.fetch(segs[0], threads, org.apache.nutch.fetcher.Fetcher.isParsing(getConf()));  // fetch it
-      delta = System.currentTimeMillis() - start;
-      res.addTiming("fetch", i + "", delta);
-      if (!Fetcher.isParsing(job)) {
-        start = System.currentTimeMillis();
-        parseSegment.parse(segs[0]);    // parse it, if needed
-        delta = System.currentTimeMillis() - start;
-        res.addTiming("parse", i + "", delta);
-      }
-      start = System.currentTimeMillis();
-      crawlDbTool.update(crawlDb, segs, true, true); // update crawldb
-      delta = System.currentTimeMillis() - start;
-      res.addTiming("update", i + "", delta);
-      start = System.currentTimeMillis();
-      linkDbTool.invert(linkDb, segs, true, true, false); // invert links
-      delta = System.currentTimeMillis() - start;
-      res.addTiming("invert", i + "", delta);
-      // delete data
-      if (delete) {
-        for (Path p : segs) {
-          fs.delete(p, true);
-        }
-      }
-    }
-    if (i == 0) {
-      LOG.warn("No URLs to fetch - check your seed list and URL filters.");
-    }
-    if (LOG.isInfoEnabled()) { LOG.info("crawl finished: " + dir); }
-    res.elapsed = System.currentTimeMillis() - res.elapsed;
-    CrawlDbReader dbreader = new CrawlDbReader();
-    dbreader.processStatJob(crawlDb.toString(), conf, false);
-    return res;
-  }
-
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nutch.tools;
+
+import java.io.OutputStream;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.nutch.crawl.CrawlDb;
+import org.apache.nutch.crawl.CrawlDbReader;
+import org.apache.nutch.crawl.Generator;
+import org.apache.nutch.crawl.Injector;
+import org.apache.nutch.crawl.LinkDb;
+import org.apache.nutch.fetcher.Fetcher;
+import org.apache.nutch.parse.ParseSegment;
+import org.apache.nutch.util.NutchConfiguration;
+import org.apache.nutch.util.NutchJob;
+
+public class Benchmark extends Configured implements Tool {
+  private static final Logger LOG = LoggerFactory.getLogger(Benchmark.class);
+
+  public static void main(String[] args) throws Exception {
+    Configuration conf = NutchConfiguration.create();
+    int res = ToolRunner.run(conf, new Benchmark(), args);
+    System.exit(res);
+  }
+  
+  private static String getDate() {
+    return new SimpleDateFormat("yyyyMMddHHmmss").format
+      (new Date(System.currentTimeMillis()));
+  }
+ 
+  private void createSeeds(FileSystem fs, Path seedsDir, int count) throws Exception {
+    OutputStream os = fs.create(new Path(seedsDir, "seeds"));
+    for (int i = 0; i < count; i++) {
+      String url = "http://www.test-" + i + ".com/\r\n";
+      os.write(url.getBytes());
+    }
+    os.flush();
+    os.close();
+  }
+  
+  public static final class BenchmarkResults {
+    Map<String,Map<String,Long>> timings = new HashMap<String,Map<String,Long>>();
+    List<String> runs = new ArrayList<String>();
+    List<String> stages = new ArrayList<String>();
+    int seeds, depth, threads;
+    boolean delete;
+    long topN;
+    long elapsed;
+    String plugins;
+    
+    public void addTiming(String stage, String run, long timing) {
+      if (!runs.contains(run)) {
+        runs.add(run);
+      }
+      if (!stages.contains(stage)) {
+        stages.add(stage);
+      }
+      Map<String,Long> t = timings.get(stage);
+      if (t == null) {
+        t = new HashMap<String,Long>();
+        timings.put(stage, t);
+      }
+      t.put(run, timing);
+    }
+    
+    public String toString() {
+      StringBuilder sb = new StringBuilder();
+      sb.append("* Plugins:\t" + plugins + "\n");
+      sb.append("* Seeds:\t" + seeds + "\n");
+      sb.append("* Depth:\t" + depth + "\n");
+      sb.append("* Threads:\t" + threads + "\n");
+      sb.append("* TopN:\t" + topN + "\n");
+      sb.append("* Delete:\t" + delete + "\n");
+      sb.append("* TOTAL ELAPSED:\t" + elapsed + "\n");
+      for (String stage : stages) {
+        Map<String,Long> timing = timings.get(stage);
+        if (timing == null) continue;
+        sb.append("- stage: " + stage + "\n");
+        for (String r : runs) {
+          Long Time = timing.get(r);
+          if (Time == null) {
+            continue;
+          }
+          sb.append("\trun " + r + "\t" + Time + "\n");
+        }
+      }
+      return sb.toString();
+    }
+    
+    public List<String> getStages() {
+      return stages;
+    }
+    public List<String> getRuns() {
+      return runs;
+    }
+  }
+  
+  public int run(String[] args) throws Exception {
+    String plugins = "protocol-http|parse-tika|scoring-opic|urlfilter-regex|urlnormalizer-pass";
+    int seeds = 1;
+    int depth = 10;
+    int threads = 10;
+    boolean delete = true;
+    long topN = Long.MAX_VALUE;
+    
+    if (args.length == 0) {
+      System.err.println("Usage: Benchmark [-seeds NN] [-depth NN] [-threads NN] [-keep] [-maxPerHost NN] [-plugins <regex>]");
+      System.err.println("\t-seeds NN\tcreate NN unique hosts in a seed list (default: 1)");
+      System.err.println("\t-depth NN\tperform NN crawl cycles (default: 10)");
+      System.err.println("\t-threads NN\tuse NN threads per Fetcher task (default: 10)");
+      System.err.println("\t-keep\tkeep segment data (default: delete after updatedb)");
+      System.err.println("\t-plugins <regex>\toverride 'plugin.includes'.");
+      System.err.println("\tNOTE: if not specified, this is reset to: " + plugins);
+      System.err.println("\tNOTE: if 'default' is specified then a value set in nutch-default/nutch-site is used.");
+      System.err.println("\t-maxPerHost NN\tmax. # of URLs per host in a fetchlist");
+      return -1;
+    }
+    int maxPerHost = Integer.MAX_VALUE;
+    for (int i = 0; i < args.length; i++) {
+      if (args[i].equals("-seeds")) {
+        seeds = Integer.parseInt(args[++i]);
+      } else if (args[i].equals("-threads")) {
+        threads = Integer.parseInt(args[++i]);
+      } else if (args[i].equals("-depth")) {
+        depth = Integer.parseInt(args[++i]);
+      } else if (args[i].equals("-keep")) {
+        delete = false;
+      } else if (args[i].equals("-plugins")) {
+        plugins = args[++i];
+      } else if (args[i].equalsIgnoreCase("-maxPerHost")) {
+        maxPerHost = Integer.parseInt(args[++i]);
+      } else {
+        LOG.info("Invalid argument: '" + args[i] + "'");
+        return -1;
+      }
+    }
+    BenchmarkResults res = benchmark(seeds, depth, threads, maxPerHost, topN, delete, plugins);
+    System.out.println(res);
+    return 0;
+  }
+  
+  public BenchmarkResults benchmark(int seeds, int depth, int threads, int maxPerHost,
+        long topN, boolean delete, String plugins) throws Exception {
+    Configuration conf = getConf();
+    conf.set("http.proxy.host", "localhost");
+    conf.setInt("http.proxy.port", 8181);
+    conf.set("http.agent.name", "test");
+    conf.set("http.robots.agents", "test,*");
+    if (!plugins.equals("default")) {
+      conf.set("plugin.includes", plugins);
+    }
+    conf.setInt(Generator.GENERATOR_MAX_COUNT, maxPerHost);
+    conf.set(Generator.GENERATOR_COUNT_MODE, Generator.GENERATOR_COUNT_VALUE_HOST);
+    JobConf job = new NutchJob(getConf());    
+    FileSystem fs = FileSystem.get(job);
+    Path dir = new Path(getConf().get("hadoop.tmp.dir"),
+            "bench-" + System.currentTimeMillis());
+    fs.mkdirs(dir);
+    Path rootUrlDir = new Path(dir, "seed");
+    fs.mkdirs(rootUrlDir);
+    createSeeds(fs, rootUrlDir, seeds);
+
+    if (LOG.isInfoEnabled()) {
+      LOG.info("crawl started in: " + dir);
+      LOG.info("rootUrlDir = " + rootUrlDir);
+      LOG.info("threads = " + threads);
+      LOG.info("depth = " + depth);      
+    }
+    BenchmarkResults res = new BenchmarkResults();
+    res.delete = delete;
+    res.depth = depth;
+    res.plugins = plugins;
+    res.seeds = seeds;
+    res.threads = threads;
+    res.topN = topN;
+    Path crawlDb = new Path(dir + "/crawldb");
+    Path linkDb = new Path(dir + "/linkdb");
+    Path segments = new Path(dir + "/segments");
+    res.elapsed = System.currentTimeMillis();
+    Injector injector = new Injector(getConf());
+    Generator generator = new Generator(getConf());
+    Fetcher fetcher = new Fetcher(getConf());
+    ParseSegment parseSegment = new ParseSegment(getConf());
+    CrawlDb crawlDbTool = new CrawlDb(getConf());
+    LinkDb linkDbTool = new LinkDb(getConf());
+      
+    // initialize crawlDb
+    long start = System.currentTimeMillis();
+    injector.inject(crawlDb, rootUrlDir);
+    long delta = System.currentTimeMillis() - start;
+    res.addTiming("inject", "0", delta);
+    int i;
+    for (i = 0; i < depth; i++) {             // generate new segment
+      start = System.currentTimeMillis();
+      Path[] segs = generator.generate(crawlDb, segments, -1, topN, System
+          .currentTimeMillis());
+      delta = System.currentTimeMillis() - start;
+      res.addTiming("generate", i + "", delta);
+      if (segs == null) {
+        LOG.info("Stopping at depth=" + i + " - no more URLs to fetch.");
+        break;
+      }
+      start = System.currentTimeMillis();
+      fetcher.fetch(segs[0], threads, org.apache.nutch.fetcher.Fetcher.isParsing(getConf()));  // fetch it
+      delta = System.currentTimeMillis() - start;
+      res.addTiming("fetch", i + "", delta);
+      if (!Fetcher.isParsing(job)) {
+        start = System.currentTimeMillis();
+        parseSegment.parse(segs[0]);    // parse it, if needed
+        delta = System.currentTimeMillis() - start;
+        res.addTiming("parse", i + "", delta);
+      }
+      start = System.currentTimeMillis();
+      crawlDbTool.update(crawlDb, segs, true, true); // update crawldb
+      delta = System.currentTimeMillis() - start;
+      res.addTiming("update", i + "", delta);
+      start = System.currentTimeMillis();
+      linkDbTool.invert(linkDb, segs, true, true, false); // invert links
+      delta = System.currentTimeMillis() - start;
+      res.addTiming("invert", i + "", delta);
+      // delete data
+      if (delete) {
+        for (Path p : segs) {
+          fs.delete(p, true);
+        }
+      }
+    }
+    if (i == 0) {
+      LOG.warn("No URLs to fetch - check your seed list and URL filters.");
+    }
+    if (LOG.isInfoEnabled()) { LOG.info("crawl finished: " + dir); }
+    res.elapsed = System.currentTimeMillis() - res.elapsed;
+    CrawlDbReader dbreader = new CrawlDbReader();
+    dbreader.processStatJob(crawlDb.toString(), conf, false);
+    return res;
+  }
+
+}
Index: src/java/org/apache/nutch/protocol/ProtocolFactory.java
===================================================================
--- src/java/org/apache/nutch/protocol/ProtocolFactory.java	(revision 1169488)
+++ src/java/org/apache/nutch/protocol/ProtocolFactory.java	(working copy)
@@ -21,8 +21,8 @@
 import java.net.MalformedURLException;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.plugin.*;
 import org.apache.nutch.util.ObjectCache;
@@ -38,7 +38,7 @@
  */
 public class ProtocolFactory {
 
-  public static final Log LOG = LogFactory.getLog(ProtocolFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ProtocolFactory.class);
 
   private ExtensionPoint extensionPoint;
 
Index: src/java/org/apache/nutch/segment/SegmentMerger.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMerger.java	(revision 1169488)
+++ src/java/org/apache/nutch/segment/SegmentMerger.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.Iterator;
 import java.util.TreeMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -113,7 +113,7 @@
 public class SegmentMerger extends Configured implements
     Mapper<Text, MetaWrapper, Text, MetaWrapper>,
     Reducer<Text, MetaWrapper, Text, MetaWrapper> {
-  private static final Log LOG = LogFactory.getLog(SegmentMerger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SegmentMerger.class);
 
   private static final String SEGMENT_PART_KEY = "part";
   private static final String SEGMENT_SLICE_KEY = "slice";
Index: src/java/org/apache/nutch/segment/SegmentReader.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/segment/SegmentReader.java	(working copy)
@@ -33,8 +33,8 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -74,7 +74,7 @@
 public class SegmentReader extends Configured implements
     Reducer<Text, NutchWritable, Text, Text> {
 
-  public static final Log LOG = LogFactory.getLog(SegmentReader.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SegmentReader.class);
 
   long recNo = 0L;
   
Index: src/java/org/apache/nutch/segment/SegmentMergeFilters.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMergeFilters.java	(revision 1169488)
+++ src/java/org/apache/nutch/segment/SegmentMergeFilters.java	(working copy)
@@ -18,8 +18,8 @@
 
 import java.util.Collection;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -39,7 +39,7 @@
  * 
  */
 public class SegmentMergeFilters {
-  private static final Log LOG = LogFactory.getLog(SegmentMergeFilters.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SegmentMergeFilters.class);
   private SegmentMergeFilter[] filters;
 
   public SegmentMergeFilters(Configuration conf) {
Index: src/java/org/apache/nutch/scoring/webgraph/WebGraph.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(working copy)
@@ -34,8 +34,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -93,7 +93,7 @@
   extends Configured
   implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(WebGraph.class);
+  public static final Logger LOG = LoggerFactory.getLogger(WebGraph.class);
   public static final String LOCK_NAME = ".locked";
   public static final String INLINK_DIR = "inlinks";
   public static final String OUTLINK_DIR = "outlinks";
@@ -663,7 +663,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("WebGraph: " + StringUtils.stringifyException(e));
+      LOG.error("WebGraph: " + StringUtils.stringifyException(e));
       return -2;
     }
   }
Index: src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java	(working copy)
@@ -28,8 +28,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -66,7 +66,7 @@
   implements Tool, Mapper<Text, Writable, Text, ObjectWritable>,
   Reducer<Text, ObjectWritable, Text, CrawlDatum> {
 
-  public static final Log LOG = LogFactory.getLog(ScoreUpdater.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ScoreUpdater.class);
 
   private JobConf conf;
   private float clearScore = 0.0f;
@@ -244,7 +244,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("ScoreUpdater: " + StringUtils.stringifyException(e));
+      LOG.error("ScoreUpdater: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java	(working copy)
@@ -27,8 +27,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -65,7 +65,7 @@
   extends Configured
   implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(NodeDumper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(NodeDumper.class);
 
   private static enum DumpType {
     INLINKS,
@@ -271,7 +271,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("NodeDumper: " + StringUtils.stringifyException(e));
+      LOG.error("NodeDumper: " + StringUtils.stringifyException(e));
       return -2;
     }
   }
Index: src/java/org/apache/nutch/scoring/webgraph/LinkRank.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(working copy)
@@ -34,8 +34,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -72,7 +72,7 @@
   extends Configured
   implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(LinkRank.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LinkRank.class);
   private static final String NUM_NODES = "_num_nodes_";
 
   /**
@@ -687,7 +687,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("LinkAnalysis: " + StringUtils.stringifyException(e));
+      LOG.error("LinkAnalysis: " + StringUtils.stringifyException(e));
       return -2;
     }
   }
Index: src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(working copy)
@@ -33,8 +33,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -75,7 +75,7 @@
   extends Configured
   implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(LinkDumper.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LinkDumper.class);
   public static final String DUMP_DIR = "linkdump";
 
   /**
@@ -454,7 +454,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("LinkDumper: " + StringUtils.stringifyException(e));
+      LOG.error("LinkDumper: " + StringUtils.stringifyException(e));
       return -2;
     }
   }
Index: src/java/org/apache/nutch/scoring/webgraph/Loops.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/Loops.java	(revision 1169488)
+++ src/java/org/apache/nutch/scoring/webgraph/Loops.java	(working copy)
@@ -35,8 +35,8 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -80,7 +80,7 @@
   extends Configured
   implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(Loops.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Loops.class);
   public static final String LOOPS_DIR = "loops";
   public static final String ROUTES_DIR = "routes";
 
@@ -605,7 +605,7 @@
       return 0;
     }
     catch (Exception e) {
-      LOG.fatal("Loops: " + StringUtils.stringifyException(e));
+      LOG.error("Loops: " + StringUtils.stringifyException(e));
       return -2;
     }
   }
Index: src/java/org/apache/nutch/net/URLNormalizers.java
===================================================================
--- src/java/org/apache/nutch/net/URLNormalizers.java	(revision 1169488)
+++ src/java/org/apache/nutch/net/URLNormalizers.java	(working copy)
@@ -28,8 +28,8 @@
 import java.util.Set;
 import java.util.Vector;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.plugin.Extension;
 import org.apache.nutch.plugin.ExtensionPoint;
@@ -97,7 +97,7 @@
   public static final String SCOPE_OUTLINK = "outlink";
   
 
-  public static final Log LOG = LogFactory.getLog(URLNormalizers.class);
+  public static final Logger LOG = LoggerFactory.getLogger(URLNormalizers.class);
 
   /* Empty extension list for caching purposes. */
   private final List<Extension> EMPTY_EXTENSION_LIST = Collections.EMPTY_LIST;
Index: src/java/org/apache/nutch/crawl/CrawlDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/CrawlDbReader.java	(working copy)
@@ -27,8 +27,8 @@
 import java.util.TreeMap;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -68,7 +68,7 @@
  */
 public class CrawlDbReader implements Closeable {
 
-  public static final Log LOG = LogFactory.getLog(CrawlDbReader.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CrawlDbReader.class);
   
   public static final int STD_FORMAT = 0;
   public static final int CSV_FORMAT = 1;
Index: src/java/org/apache/nutch/crawl/LinkDb.java
===================================================================
--- src/java/org/apache/nutch/crawl/LinkDb.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/LinkDb.java	(working copy)
@@ -23,8 +23,8 @@
 import java.net.*;
 
-// Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+// SLF4J imports
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -45,7 +45,7 @@
 /** Maintains an inverted link map, listing incoming links for each url. */
 public class LinkDb extends Configured implements Tool, Mapper<Text, ParseData, Text, Inlinks> {
 
-  public static final Log LOG = LogFactory.getLog(LinkDb.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LinkDb.class);
 
   public static final String CURRENT_NAME = "current";
   public static final String LOCK_NAME = ".locked";
@@ -292,7 +292,7 @@
       invert(db, segs.toArray(new Path[segs.size()]), normalize, filter, force);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("LinkDb: " + StringUtils.stringifyException(e));
+      LOG.error("LinkDb: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/LinkDbMerger.java
===================================================================
--- src/java/org/apache/nutch/crawl/LinkDbMerger.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/LinkDbMerger.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.Iterator;
 import java.util.Random;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
@@ -64,7 +64,7 @@
  * @author Andrzej Bialecki
  */
 public class LinkDbMerger extends Configured implements Tool, Reducer<Text, Inlinks, Text, Inlinks> {
-  private static final Log LOG = LogFactory.getLog(LinkDbMerger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(LinkDbMerger.class);
   
   private int maxInlinks;
   
@@ -178,7 +178,7 @@
       merge(output, dbs.toArray(new Path[dbs.size()]), normalize, filter);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("LinkDbMerger: " + StringUtils.stringifyException(e));
+      LOG.error("LinkDbMerger: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/MapWritable.java
===================================================================
--- src/java/org/apache/nutch/crawl/MapWritable.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/MapWritable.java	(working copy)
@@ -29,8 +29,8 @@
 import java.util.Set;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DataInputBuffer;
@@ -61,7 +61,7 @@
  */
 public class MapWritable implements Writable {
 
-  public static final Log LOG = LogFactory.getLog(MapWritable.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MapWritable.class);
 
   private KeyValueEntry fFirst;
 
Index: src/java/org/apache/nutch/crawl/Crawl.java
===================================================================
--- src/java/org/apache/nutch/crawl/Crawl.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/Crawl.java	(working copy)
@@ -22,8 +22,8 @@
 
 // Commons Logging imports
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
@@ -40,7 +40,7 @@
 import org.apache.nutch.fetcher.Fetcher;
 
 public class Crawl extends Configured implements Tool {
-  public static final Log LOG = LogFactory.getLog(Crawl.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Crawl.class);
 
   private static String getDate() {
     return new SimpleDateFormat("yyyyMMddHHmmss").format
Index: src/java/org/apache/nutch/crawl/LinkDbFilter.java
===================================================================
--- src/java/org/apache/nutch/crawl/LinkDbFilter.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/LinkDbFilter.java	(working copy)
@@ -20,8 +20,8 @@
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
@@ -53,7 +53,7 @@
   
   private String scope;
   
-  public static final Log LOG = LogFactory.getLog(LinkDbFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LinkDbFilter.class);
 
   private Text newKey = new Text();
   
Index: src/java/org/apache/nutch/crawl/Injector.java
===================================================================
--- src/java/org/apache/nutch/crawl/Injector.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/Injector.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.*;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -48,7 +48,7 @@
  * e.g. http://www.nutch.org/ \t nutch.score=10 \t nutch.fetchInterval=2592000 \t userType=open_source
  **/
 public class Injector extends Configured implements Tool {
-  public static final Log LOG = LogFactory.getLog(Injector.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Injector.class);
   
   /** metadata key reserved for setting a custom score for a specific URL */
   public static String nutchScoreMDName = "nutch.score";
@@ -248,7 +248,7 @@
       inject(new Path(args[0]), new Path(args[1]));
       return 0;
     } catch (Exception e) {
-      LOG.fatal("Injector: " + StringUtils.stringifyException(e));
+      LOG.error("Injector: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/CrawlDb.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDb.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/CrawlDb.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.*;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.*;
@@ -42,7 +42,7 @@
  * crawldb accordingly.
  */
 public class CrawlDb extends Configured implements Tool {
-  public static final Log LOG = LogFactory.getLog(CrawlDb.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CrawlDb.class);
 
   public static final String CRAWLDB_ADDITIONS_ALLOWED = "db.update.additions.allowed";
 
@@ -203,7 +203,7 @@
       update(new Path(args[0]), dirs.toArray(new Path[dirs.size()]), normalize, filter, additionsAllowed, force);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("CrawlDb update: " + StringUtils.stringifyException(e));
+      LOG.error("CrawlDb update: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/CrawlDbMerger.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbMerger.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/CrawlDbMerger.java	(working copy)
@@ -23,8 +23,8 @@
 import java.util.Map.Entry;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,7 +54,7 @@
  * @author Andrzej Bialecki
  */
 public class CrawlDbMerger extends Configured implements Tool {
-  private static final Log LOG = LogFactory.getLog(CrawlDbMerger.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CrawlDbMerger.class);
 
   public static class Merger extends MapReduceBase implements Reducer<Text, CrawlDatum, Text, CrawlDatum> {
     private org.apache.hadoop.io.MapWritable meta;
@@ -190,7 +190,7 @@
       merge(output, dbs.toArray(new Path[dbs.size()]), normalize, filter);
       return 0;
     } catch (Exception e) {
-      LOG.fatal("CrawlDb merge: " + StringUtils.stringifyException(e));
+      LOG.error("CrawlDb merge: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/URLPartitioner.java
===================================================================
--- src/java/org/apache/nutch/crawl/URLPartitioner.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/URLPartitioner.java	(working copy)
@@ -22,8 +22,8 @@
 import java.net.MalformedURLException;
 import java.net.UnknownHostException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
 import org.apache.nutch.net.URLNormalizers;
@@ -34,7 +34,7 @@
  * parameter 'partition.url.mode' which can be 'byHost', 'byDomain' or 'byIP'
  */
 public class URLPartitioner implements Partitioner<Text,Writable> {
-  private static final Log LOG = LogFactory.getLog(URLPartitioner.class);
+  private static final Logger LOG = LoggerFactory.getLogger(URLPartitioner.class);
 
   public static final String PARTITION_MODE_KEY = "partition.url.mode";
 
Index: src/java/org/apache/nutch/crawl/CrawlDbFilter.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbFilter.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/CrawlDbFilter.java	(working copy)
@@ -19,8 +19,8 @@
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
@@ -54,7 +54,7 @@
   
   private String scope;
 
-  public static final Log LOG = LogFactory.getLog(CrawlDbFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CrawlDbFilter.class);
 
   public void configure(JobConf job) {
     urlFiltering = job.getBoolean(URL_FILTERING, false);
Index: src/java/org/apache/nutch/crawl/Generator.java
===================================================================
--- src/java/org/apache/nutch/crawl/Generator.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/Generator.java	(working copy)
@@ -23,8 +23,8 @@
 import java.text.*;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
@@ -57,7 +57,7 @@
  **/
 public class Generator extends Configured implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(Generator.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Generator.class);
 
   public static final String GENERATE_UPDATE_CRAWLDB = "generate.update.crawldb";
   public static final String GENERATOR_MIN_SCORE = "generate.min.score";
@@ -693,7 +693,7 @@
           norm, force, maxNumSegments);
       if (segs == null) return -1;
     } catch (Exception e) {
-      LOG.fatal("Generator: " + StringUtils.stringifyException(e));
+      LOG.error("Generator: " + StringUtils.stringifyException(e));
       return -1;
     }
     return 0;
Index: src/java/org/apache/nutch/crawl/LinkDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/LinkDbReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/LinkDbReader.java	(working copy)
@@ -20,8 +20,8 @@
 import java.io.IOException;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.*;
@@ -40,7 +40,7 @@
 
 /** . */
 public class LinkDbReader extends Configured implements Tool, Closeable {
-  public static final Log LOG = LogFactory.getLog(LinkDbReader.class);
+  public static final Logger LOG = LoggerFactory.getLogger(LinkDbReader.class);
 
   private static final Partitioner<WritableComparable, Writable> PARTITIONER = new HashPartitioner<WritableComparable, Writable>();
 
@@ -149,7 +149,7 @@
         return -1;
       }
     } catch (Exception e) {
-      LOG.fatal("LinkDbReader: " + StringUtils.stringifyException(e));
+      LOG.error("LinkDbReader: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/crawl/CrawlDbReducer.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReducer.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/CrawlDbReducer.java	(working copy)
@@ -24,8 +24,8 @@
 import java.io.IOException;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
@@ -36,7 +36,7 @@
 
 /** Merge new page entries with existing entries. */
 public class CrawlDbReducer implements Reducer<Text, CrawlDatum, Text, CrawlDatum> {
-  public static final Log LOG = LogFactory.getLog(CrawlDbReducer.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CrawlDbReducer.class);
   
   private int retryMax;
   private CrawlDatum result = new CrawlDatum();
Index: src/java/org/apache/nutch/crawl/SignatureFactory.java
===================================================================
--- src/java/org/apache/nutch/crawl/SignatureFactory.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/SignatureFactory.java	(working copy)
@@ -18,8 +18,8 @@
 package org.apache.nutch.crawl;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -33,7 +33,7 @@
  * @author Andrzej Bialecki &lt;ab@getopt.org&gt;
  */
 public class SignatureFactory {
-  private static final Log LOG = LogFactory.getLog(SignatureFactory.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SignatureFactory.class);
 
   private SignatureFactory() {}                   // no public ctor
 
Index: src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java
===================================================================
--- src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.crawl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -31,7 +31,7 @@
  * @author Andrzej Bialecki
  */
 public abstract class AbstractFetchSchedule extends Configured implements FetchSchedule {
-  private static final Log LOG = LogFactory.getLog(AbstractFetchSchedule.class);
+  private static final Logger LOG = LoggerFactory.getLogger(AbstractFetchSchedule.class);
   
   protected int defaultInterval;
   protected int maxInterval;
Index: src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
===================================================================
--- src/java/org/apache/nutch/crawl/FetchScheduleFactory.java	(revision 1169488)
+++ src/java/org/apache/nutch/crawl/FetchScheduleFactory.java	(working copy)
@@ -17,15 +17,15 @@
 
 package org.apache.nutch.crawl;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.util.ObjectCache;
 
 /** Creates and caches a {@link FetchSchedule} implementation. */
 public class FetchScheduleFactory {
 
-  public static final Log LOG = LogFactory.getLog(FetchScheduleFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(FetchScheduleFactory.class);
 
   private FetchScheduleFactory() {}                   // no public ctor
 
Index: src/java/org/apache/nutch/parse/ParsePluginsReader.java
===================================================================
--- src/java/org/apache/nutch/parse/ParsePluginsReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParsePluginsReader.java	(working copy)
@@ -33,8 +33,8 @@
 import org.xml.sax.InputSource;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -53,7 +53,7 @@
 class ParsePluginsReader {
   
   /* our log stream */
-  public static final Log LOG = LogFactory.getLog(ParsePluginsReader.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParsePluginsReader.class);
   
   /** The property name of the parse-plugins location */
   private static final String PP_FILE_PROP = "parse.plugin.file";
Index: src/java/org/apache/nutch/parse/ParseOutputFormat.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseOutputFormat.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParseOutputFormat.java	(working copy)
@@ -18,8 +18,8 @@
 package org.apache.nutch.parse;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
@@ -45,7 +45,7 @@
 
 /* Parse content in a segment. */
 public class ParseOutputFormat implements OutputFormat<Text, Parse> {
-  private static final Log LOG = LogFactory.getLog(ParseOutputFormat.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ParseOutputFormat.class);
 
   private URLFilters filters;
   private URLNormalizers normalizers;
Index: src/java/org/apache/nutch/parse/ParseUtil.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseUtil.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParseUtil.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.protocol.Content;
 
@@ -40,7 +40,7 @@
 public class ParseUtil {
   
   /* our log stream */
-  public static final Log LOG = LogFactory.getLog(ParseUtil.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParseUtil.class);
   private ParserFactory parserFactory;
   /** Parser timeout set to 30 sec by default. Set -1 to deactivate **/
   private int MAX_PARSE_TIME = 30;
Index: src/java/org/apache/nutch/parse/ParseResult.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseResult.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParseResult.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -39,7 +39,7 @@
   private Map<Text, Parse> parseMap;
   private String originalUrl;
   
-  public static final Log LOG = LogFactory.getLog(ParseResult.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParseResult.class);
   
   /**
    * Create a container for parse results.
Index: src/java/org/apache/nutch/parse/ParserChecker.java
===================================================================
--- src/java/org/apache/nutch/parse/ParserChecker.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParserChecker.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.parse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.Tool;
@@ -37,7 +37,7 @@
 
 public class ParserChecker implements Tool {
 
-  public static final Log LOG = LogFactory.getLog(ParserChecker.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParserChecker.class);
 
   public ParserChecker() {
   }
Index: src/java/org/apache/nutch/parse/OutlinkExtractor.java
===================================================================
--- src/java/org/apache/nutch/parse/OutlinkExtractor.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/OutlinkExtractor.java	(working copy)
@@ -21,8 +21,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.oro.text.regex.MatchResult;
@@ -48,7 +48,7 @@
  * @since 0.7
  */
 public class OutlinkExtractor {
-  private static final Log LOG = LogFactory.getLog(OutlinkExtractor.class);
+  private static final Logger LOG = LoggerFactory.getLogger(OutlinkExtractor.class);
 
   /**
    * Regex pattern to get URLs within a plain text.
Index: src/java/org/apache/nutch/parse/ParserFactory.java
===================================================================
--- src/java/org/apache/nutch/parse/ParserFactory.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParserFactory.java	(working copy)
@@ -24,8 +24,8 @@
 import java.util.Vector;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -43,7 +43,7 @@
 /** Creates and caches {@link Parser} plugins.*/
 public final class ParserFactory {
   
-  public static final Log LOG = LogFactory.getLog(ParserFactory.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParserFactory.class);
   
   /** Wildcard for default plugins. */
   public static final String DEFAULT_PLUGIN = "*";
Index: src/java/org/apache/nutch/parse/ParseSegment.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseSegment.java	(revision 1169488)
+++ src/java/org/apache/nutch/parse/ParseSegment.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.parse;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.SignatureFactory;
@@ -43,7 +43,7 @@
     Mapper<WritableComparable, Content, Text, ParseImpl>,
     Reducer<Text, Writable, Text, Writable> {
 
-  public static final Log LOG = LogFactory.getLog(ParseSegment.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ParseSegment.class);
   
   private ScoringFilters scfilters;
   
Index: src/java/org/apache/nutch/util/DomUtil.java
===================================================================
--- src/java/org/apache/nutch/util/DomUtil.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/DomUtil.java	(working copy)
@@ -35,13 +35,13 @@
 import org.xml.sax.SAXException;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 public class DomUtil {
 
-  private final static Log LOG = LogFactory.getLog(DomUtil.class);
+  private final static Logger LOG = LoggerFactory.getLogger(DomUtil.class);
 
   /**
    * Returns parsed dom tree or null if any error
Index: src/java/org/apache/nutch/util/LogUtil.java
===================================================================
--- src/java/org/apache/nutch/util/LogUtil.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/LogUtil.java	(working copy)
@@ -23,8 +23,8 @@
 import java.lang.reflect.Method;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 
 /**
@@ -34,7 +34,7 @@
  */
 public class LogUtil {
 
-  private final static Log LOG = LogFactory.getLog(LogUtil.class);
+  private final static Logger LOG = LoggerFactory.getLogger(LogUtil.class);
 
   private static Method TRACE = null;
   private static Method DEBUG = null;
@@ -45,12 +45,13 @@
 
   static {
     try {
-      TRACE = Log.class.getMethod("trace", new Class[] { Object.class });
-      DEBUG = Log.class.getMethod("debug", new Class[] { Object.class });
-      INFO  = Log.class.getMethod("info",  new Class[] { Object.class });
-      WARN  = Log.class.getMethod("warn",  new Class[] { Object.class });
-      ERROR = Log.class.getMethod("error", new Class[] { Object.class });
-      FATAL = Log.class.getMethod("fatal", new Class[] { Object.class });
+      TRACE = Logger.class.getMethod("trace", new Class[] { String.class });
+      DEBUG = Logger.class.getMethod("debug", new Class[] { String.class });
+      INFO  = Logger.class.getMethod("info",  new Class[] { String.class });
+      WARN  = Logger.class.getMethod("warn",  new Class[] { String.class });
+      ERROR = Logger.class.getMethod("error", new Class[] { String.class });
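+      // SLF4J defines no FATAL level, so fatal output is routed to error.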
+      FATAL = Logger.class.getMethod("error", new Class[] { String.class });
     } catch(Exception e) {
       if (LOG.isErrorEnabled()) {
         LOG.error("Cannot init log methods", e);
@@ -59,32 +59,32 @@
   }
   
   
-  public static PrintStream getTraceStream(final Log logger) {
+  public static PrintStream getTraceStream(final Logger logger) {
     return getLogStream(logger, TRACE);
   }
 
-  public static PrintStream getDebugStream(final Log logger) {
+  public static PrintStream getDebugStream(final Logger logger) {
     return getLogStream(logger, DEBUG);
   }
 
-  public static PrintStream getInfoStream(final Log logger) {
+  public static PrintStream getInfoStream(final Logger logger) {
     return getLogStream(logger, INFO);
   }
   
-  public static PrintStream getWarnStream(final Log logger) {
+  public static PrintStream getWarnStream(final Logger logger) {
     return getLogStream(logger, WARN);
   }
 
-  public static PrintStream getErrorStream(final Log logger) {
+  public static PrintStream getErrorStream(final Logger logger) {
     return getLogStream(logger, ERROR);
   }
 
-  public static PrintStream getFatalStream(final Log logger) {
+  public static PrintStream getFatalStream(final Logger logger) {
     return getLogStream(logger, FATAL);
   }
   
   /** Returns a stream that, when written to, adds log lines. */
-  private static PrintStream getLogStream(final Log logger, final Method method) {
+  private static PrintStream getLogStream(final Logger logger, final Method method) {
     return new PrintStream(new ByteArrayOutputStream() {
         private int scan = 0;
 
@@ -102,8 +102,8 @@
           try {
             method.invoke(logger, new Object[] { toString().trim() });
           } catch (Exception e) {
-            if (LOG.isFatalEnabled()) {
-              LOG.fatal("Cannot log with method [" + method + "]", e);
+            if (LOG.isErrorEnabled()) {
+              LOG.error("Cannot log with method [" + method + "]", e);
             }
           }
           reset();
Index: src/java/org/apache/nutch/util/EncodingDetector.java
===================================================================
--- src/java/org/apache/nutch/util/EncodingDetector.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/EncodingDetector.java	(working copy)
@@ -26,8 +26,8 @@
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.protocols.Response;
@@ -104,7 +104,7 @@
     }
   }
 
-  public static final Log LOG = LogFactory.getLog(EncodingDetector.class);
+  public static final Logger LOG = LoggerFactory.getLogger(EncodingDetector.class);
 
   public static final int NO_THRESHOLD = -1;
 
Index: src/java/org/apache/nutch/util/DeflateUtils.java
===================================================================
--- src/java/org/apache/nutch/util/DeflateUtils.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/DeflateUtils.java	(working copy)
@@ -24,15 +24,15 @@
 import java.util.zip.DeflaterOutputStream;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *  A collection of utility methods for working on deflated data.
  */
 public class DeflateUtils {
   
-  private static final Log LOG = LogFactory.getLog(DeflateUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DeflateUtils.class);
   private static final int EXPECTED_COMPRESSION_RATIO = 5;
   private static final int BUF_SIZE = 4096;
 
Index: src/java/org/apache/nutch/util/domain/DomainStatistics.java
===================================================================
--- src/java/org/apache/nutch/util/domain/DomainStatistics.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/domain/DomainStatistics.java	(working copy)
@@ -22,8 +22,8 @@
 import java.text.SimpleDateFormat;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -55,7 +55,7 @@
 implements Tool, Mapper<Text, CrawlDatum, Text, LongWritable>,
            Reducer<Text, LongWritable, LongWritable, Text> {
 
-  private static final Log LOG = LogFactory.getLog(DomainStatistics.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DomainStatistics.class);
   
   private static final Text FETCHED_TEXT = new Text("FETCHED");
   private static final Text NOT_FETCHED_TEXT = new Text("NOT_FETCHED");
Index: src/java/org/apache/nutch/util/domain/DomainSuffixes.java
===================================================================
--- src/java/org/apache/nutch/util/domain/DomainSuffixes.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/domain/DomainSuffixes.java	(working copy)
@@ -20,8 +20,8 @@
 import java.io.InputStream;
 import java.util.HashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -30,7 +30,7 @@
  * @author Enis Soztutar &lt;enis.soz.nutch@gmail.com&gt;
  */
 public class DomainSuffixes {
-  private static final Log LOG = LogFactory.getLog(DomainSuffixes.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DomainSuffixes.class);
   
   private HashMap<String, DomainSuffix> domains = new HashMap<String, DomainSuffix>(); 
   
Index: src/java/org/apache/nutch/util/domain/DomainSuffixesReader.java
===================================================================
--- src/java/org/apache/nutch/util/domain/DomainSuffixesReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/domain/DomainSuffixesReader.java	(working copy)
@@ -24,8 +24,8 @@
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.util.domain.DomainSuffix.Status;
 import org.apache.nutch.util.domain.TopLevelDomain.Type;
@@ -43,7 +43,7 @@
  */
 class DomainSuffixesReader {
 
-  private static final Log LOG = LogFactory.getLog(DomainSuffixesReader.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DomainSuffixesReader.class);
 
   void read(DomainSuffixes tldEntries, InputStream input) throws IOException{
     try {
Index: src/java/org/apache/nutch/util/ObjectCache.java
===================================================================
--- src/java/org/apache/nutch/util/ObjectCache.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/ObjectCache.java	(working copy)
@@ -19,13 +19,13 @@
 import java.util.HashMap;
 import java.util.WeakHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 
 public class ObjectCache {
   
-  private static final Log LOG = LogFactory.getLog(ObjectCache.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ObjectCache.class);
   
   private static final WeakHashMap<Configuration, ObjectCache> CACHE = 
     new WeakHashMap<Configuration, ObjectCache>();
Index: src/java/org/apache/nutch/util/GZIPUtils.java
===================================================================
--- src/java/org/apache/nutch/util/GZIPUtils.java	(revision 1169488)
+++ src/java/org/apache/nutch/util/GZIPUtils.java	(working copy)
@@ -24,15 +24,15 @@
 import java.util.zip.GZIPOutputStream;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  *  A collection of utility methods for working on GZIPed data.
  */
 public class GZIPUtils {
   
-  private static final Log LOG = LogFactory.getLog(GZIPUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(GZIPUtils.class);
   private static final int EXPECTED_COMPRESSION_RATIO= 5;
   private static final int BUF_SIZE= 4096;
 
Index: src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java	(working copy)
@@ -24,8 +24,8 @@
 import java.util.Iterator;
 import java.util.Date;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -85,7 +85,7 @@
 implements Reducer<Text, SolrDeleteDuplicates.SolrRecord, Text, SolrDeleteDuplicates.SolrRecord>,
 Tool {
 
-  public static final Log LOG = LogFactory.getLog(SolrDeleteDuplicates.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SolrDeleteDuplicates.class);
 
   private static final String SOLR_GET_ALL_QUERY = SolrConstants.ID_FIELD + ":[* TO *]";
 
Index: src/java/org/apache/nutch/indexer/solr/SolrUtils.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrUtils.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrUtils.java	(working copy)
@@ -20,8 +20,8 @@
 import org.apache.commons.httpclient.auth.AuthScope;
 import org.apache.commons.httpclient.UsernamePasswordCredentials;
 import org.apache.commons.httpclient.params.HttpClientParams;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
 
@@ -29,7 +29,7 @@
 
 public class SolrUtils {
 
-  public static Log LOG = LogFactory.getLog(SolrIndexer.class);
+  public static Logger LOG = LoggerFactory.getLogger(SolrUtils.class);
 
   public static CommonsHttpSolrServer getCommonsHttpSolrServer(JobConf job) throws MalformedURLException {
     HttpClient client=new HttpClient();
@@ -73,4 +73,4 @@
 
     return retval.toString();
   }
-}
\ No newline at end of file
+}
Index: src/java/org/apache/nutch/indexer/solr/SolrClean.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrClean.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrClean.java	(working copy)
@@ -21,8 +21,8 @@
 import java.text.SimpleDateFormat;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.ByteWritable;
@@ -57,7 +57,7 @@
 */
 
 public class SolrClean implements Tool {
-  public static final Log LOG = LogFactory.getLog(SolrClean.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SolrClean.class);
   private Configuration conf;
 
   @Override
@@ -195,4 +195,4 @@
         new SolrClean(), args);
     System.exit(result);
   }
-}
\ No newline at end of file
+}
Index: src/java/org/apache/nutch/indexer/solr/SolrWriter.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrWriter.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrWriter.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.nutch.indexer.NutchDocument;
 import org.apache.nutch.indexer.NutchField;
@@ -35,7 +35,7 @@
 
 public class SolrWriter implements NutchIndexWriter {
 
-  public static Log LOG = LogFactory.getLog(SolrWriter.class);
+  public static Logger LOG = LoggerFactory.getLogger(SolrWriter.class);
 
   private SolrServer solr;
   private SolrMappingReader solrMapping;
Index: src/java/org/apache/nutch/indexer/solr/SolrIndexer.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrIndexer.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrIndexer.java	(working copy)
@@ -16,8 +16,8 @@
  */
 package org.apache.nutch.indexer.solr;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
@@ -45,7 +45,7 @@
 
 public class SolrIndexer extends Configured implements Tool {
 
-  public static Log LOG = LogFactory.getLog(SolrIndexer.class);
+  public static Logger LOG = LoggerFactory.getLogger(SolrIndexer.class);
 
   public SolrIndexer() {
     super(null);
@@ -92,7 +92,7 @@
       LOG.info("SolrIndexer: finished at " + sdf.format(end) + ", elapsed: " + TimingUtil.elapsedTime(start, end));
     }
     catch (Exception e){
-      LOG.error(e);
+      LOG.error(e.toString(), e);
     } finally {
       FileSystem.get(job).delete(tmp, true);
     }
@@ -135,7 +135,7 @@
       indexSolr(args[0], crawlDb, linkDb, segments, noCommit);
       return 0;
     } catch (final Exception e) {
-      LOG.fatal("SolrIndexer: " + StringUtils.stringifyException(e));
+      LOG.error("SolrIndexer: " + StringUtils.stringifyException(e));
       return -1;
     }
   }
Index: src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java	(working copy)
@@ -26,8 +26,8 @@
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.util.ObjectCache;
 import org.w3c.dom.Document;
@@ -37,7 +37,7 @@
 import org.xml.sax.SAXException;
 
 public class SolrMappingReader {
-  public static Log LOG = LogFactory.getLog(SolrMappingReader.class);
+  public static Logger LOG = LoggerFactory.getLogger(SolrMappingReader.class);
   
   private Configuration conf;
   
Index: src/java/org/apache/nutch/indexer/IndexingFiltersChecker.java
===================================================================
--- src/java/org/apache/nutch/indexer/IndexingFiltersChecker.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/IndexingFiltersChecker.java	(working copy)
@@ -3,8 +3,8 @@
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -33,7 +33,7 @@
 
 public class IndexingFiltersChecker extends Configured implements Tool {
   
-  public static final Log LOG = LogFactory.getLog(IndexingFiltersChecker.class);
+  public static final Logger LOG = LoggerFactory.getLogger(IndexingFiltersChecker.class);
   
   public IndexingFiltersChecker() {
 
Index: src/java/org/apache/nutch/indexer/IndexerMapReduce.java
===================================================================
--- src/java/org/apache/nutch/indexer/IndexerMapReduce.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/IndexerMapReduce.java	(working copy)
@@ -20,8 +20,8 @@
 import java.util.Collection;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -52,7 +52,7 @@
 implements Mapper<Text, Writable, Text, NutchWritable>,
           Reducer<Text, NutchWritable, Text, NutchDocument> {
 
-  public static final Log LOG = LogFactory.getLog(IndexerMapReduce.class);
+  public static final Logger LOG = LoggerFactory.getLogger(IndexerMapReduce.class);
 
   private IndexingFilters filters;
   private ScoringFilters scfilters;
Index: src/java/org/apache/nutch/indexer/IndexingFilters.java
===================================================================
--- src/java/org/apache/nutch/indexer/IndexingFilters.java	(revision 1169488)
+++ src/java/org/apache/nutch/indexer/IndexingFilters.java	(working copy)
@@ -21,8 +21,8 @@
 import java.util.HashMap;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.plugin.*;
 import org.apache.nutch.parse.Parse;
@@ -37,7 +37,7 @@
 
   public static final String INDEXINGFILTER_ORDER = "indexingfilter.order";
 
-  public final static Log LOG = LogFactory.getLog(IndexingFilters.class);
+  public final static Logger LOG = LoggerFactory.getLogger(IndexingFilters.class);
 
   private IndexingFilter[] indexingFilters;
 
Index: src/java/org/apache/nutch/plugin/PluginManifestParser.java
===================================================================
--- src/java/org/apache/nutch/plugin/PluginManifestParser.java	(revision 1169488)
+++ src/java/org/apache/nutch/plugin/PluginManifestParser.java	(working copy)
@@ -29,7 +29,7 @@
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 
 import org.apache.hadoop.conf.Configuration;
 import org.w3c.dom.Document;
@@ -49,7 +49,7 @@
   private static final String ATTR_CLASS = "class";
   private static final String ATTR_ID = "id";
 
-  public static final Log LOG = PluginRepository.LOG;
+  public static final Logger LOG = PluginRepository.LOG;
 
   private static final boolean WINDOWS = System.getProperty("os.name")
       .startsWith("Windows");
Index: src/java/org/apache/nutch/plugin/PluginDescriptor.java
===================================================================
--- src/java/org/apache/nutch/plugin/PluginDescriptor.java	(revision 1169488)
+++ src/java/org/apache/nutch/plugin/PluginDescriptor.java	(working copy)
@@ -25,8 +25,8 @@
 import java.util.Locale;
 import java.util.MissingResourceException;
 import java.util.ResourceBundle;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -53,7 +53,7 @@
   private ArrayList<URL> fNotExportedLibs = new ArrayList<URL>();
   private ArrayList<Extension> fExtensions = new ArrayList<Extension>();
   private PluginClassLoader fClassLoader;
-  public static final Log LOG = LogFactory.getLog(PluginDescriptor.class);
+  public static final Logger LOG = LoggerFactory.getLogger(PluginDescriptor.class);
   private Configuration fConf;
 
   /**
Index: src/java/org/apache/nutch/plugin/PluginRepository.java
===================================================================
--- src/java/org/apache/nutch/plugin/PluginRepository.java	(revision 1169488)
+++ src/java/org/apache/nutch/plugin/PluginRepository.java	(working copy)
@@ -25,8 +25,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.util.NutchConfiguration;
 
@@ -55,7 +55,7 @@
 
   private Configuration conf;
 
-  public static final Log LOG = LogFactory.getLog(PluginRepository.class);
+  public static final Logger LOG = LoggerFactory.getLogger(PluginRepository.class);
 
   /**
    * @throws PluginRuntimeException
@@ -80,7 +80,7 @@
     try {
       installExtensions(fRegisteredPlugins);
     } catch (PluginRuntimeException e) {
-        LOG.fatal(e.toString());
+        LOG.error(e.toString());
       throw new RuntimeException(e.getMessage());
     }
     displayStatus();
@@ -197,7 +197,7 @@
         checked.putAll(getPluginCheckedDependencies(plugin, all));
         checked.put(plugin.getPluginId(), plugin);
       } catch (MissingDependencyException mde) {
-        // Log exception and ignore plugin
+        // Log the exception and ignore the plugin
         LOG.warn(mde.getMessage());
       } catch (CircularDependencyException cde) {
         // Simply ignore this plugin
Index: src/plugin/index-anchor/src/java/org/apache/nutch/indexer/anchor/AnchorIndexingFilter.java
===================================================================
--- src/plugin/index-anchor/src/java/org/apache/nutch/indexer/anchor/AnchorIndexingFilter.java	(revision 1169488)
+++ src/plugin/index-anchor/src/java/org/apache/nutch/indexer/anchor/AnchorIndexingFilter.java	(working copy)
@@ -18,8 +18,8 @@
 
 import java.util.WeakHashMap;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -35,7 +35,7 @@
 public class AnchorIndexingFilter
   implements IndexingFilter {
 
-  public static final Log LOG = LogFactory.getLog(AnchorIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(AnchorIndexingFilter.class);
   private Configuration conf;
   private boolean deduplicate = false;
 
Index: src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java
===================================================================
--- src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java	(revision 1169488)
+++ src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java	(working copy)
@@ -28,8 +28,8 @@
 import org.w3c.dom.NodeList;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Nutch imports
 import org.apache.nutch.metadata.Metadata;
@@ -54,7 +54,7 @@
  */
 public class RelTagParser implements HtmlParseFilter {
   
-  public final static Log LOG = LogFactory.getLog(RelTagParser.class);
+  public final static Logger LOG = LoggerFactory.getLogger(RelTagParser.class);
 
   public final static String REL_TAG = "Rel-Tag";
   
Index: src/plugin/tld/src/java/org/apache/nutch/indexer/tld/TLDIndexingFilter.java
===================================================================
--- src/plugin/tld/src/java/org/apache/nutch/indexer/tld/TLDIndexingFilter.java	(revision 1169488)
+++ src/plugin/tld/src/java/org/apache/nutch/indexer/tld/TLDIndexingFilter.java	(working copy)
@@ -19,8 +19,8 @@
 
 import java.net.URL;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -37,7 +37,7 @@
  * @author Enis Soztutar &lt;enis.soz.nutch@gmail.com&gt;
  */
 public class TLDIndexingFilter implements IndexingFilter {
-  public static final Log LOG = LogFactory.getLog(TLDIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(TLDIndexingFilter.class);
 
   private Configuration conf;
 
@@ -51,7 +51,7 @@
       doc.add("tld", d.getDomain());
       
     }catch (Exception ex) {
-      LOG.warn(ex);
+      LOG.warn(ex.toString(), ex);
     }
 
     return doc;
Index: src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java
===================================================================
--- src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java	(revision 1169488)
+++ src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java	(working copy)
@@ -26,8 +26,8 @@
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.parse.HTMLMetaTags;
 import org.apache.nutch.parse.HtmlParseFilter;
@@ -65,7 +65,7 @@
  * @author Andrzej Bialecki &lt;ab@getopt.org&gt;
  */
 public class JSParseFilter implements HtmlParseFilter, Parser {
-  public static final Log LOG = LogFactory.getLog(JSParseFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(JSParseFilter.class);
 
   private static final int MAX_TITLE_LEN = 80;
 
Index: src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
===================================================================
--- src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java	(revision 1169488)
+++ src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java	(working copy)
@@ -21,8 +21,8 @@
 import java.net.MalformedURLException;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Nutch imports
 import org.apache.nutch.net.URLNormalizer;
@@ -33,7 +33,7 @@
 
 /** Converts URLs to a normal form . */
 public class BasicURLNormalizer implements URLNormalizer {
-    public static final Log LOG = LogFactory.getLog(BasicURLNormalizer.class);
+    public static final Logger LOG = LoggerFactory.getLogger(BasicURLNormalizer.class);
 
     private Perl5Compiler compiler = new Perl5Compiler();
     private ThreadLocal matchers = new ThreadLocal() {
Index: src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
===================================================================
--- src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java	(revision 1169488)
+++ src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.indexer.basic;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.metadata.Nutch;
 import org.apache.nutch.parse.Parse;
@@ -39,7 +39,7 @@
 
 /** Adds basic searchable fields to a document. */
 public class BasicIndexingFilter implements IndexingFilter {
-  public static final Log LOG = LogFactory.getLog(BasicIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(BasicIndexingFilter.class);
 
   private int MAX_TITLE_LENGTH;
   private Configuration conf;
Index: src/plugin/scoring-opic/src/java/org/apache/nutch/scoring/opic/OPICScoringFilter.java
===================================================================
--- src/plugin/scoring-opic/src/java/org/apache/nutch/scoring/opic/OPICScoringFilter.java	(revision 1169488)
+++ src/plugin/scoring-opic/src/java/org/apache/nutch/scoring/opic/OPICScoringFilter.java	(working copy)
@@ -24,8 +24,8 @@
 import java.util.Map.Entry;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -52,7 +52,7 @@
  */
 public class OPICScoringFilter implements ScoringFilter {
 
-  private final static Log LOG = LogFactory.getLog(OPICScoringFilter.class);
+  private final static Logger LOG = LoggerFactory.getLogger(OPICScoringFilter.class);
 
   private Configuration conf;
   private float scoreInjected;
Index: src/plugin/index-static/src/java/org/apache/nutch/indexer/staticfield/StaticFieldIndexer.java
===================================================================
--- src/plugin/index-static/src/java/org/apache/nutch/indexer/staticfield/StaticFieldIndexer.java	(revision 1169502)
+++ src/plugin/index-static/src/java/org/apache/nutch/indexer/staticfield/StaticFieldIndexer.java	(working copy)
@@ -20,6 +20,8 @@
 import java.util.HashMap;
 import java.util.Map.Entry;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.Inlinks;
 import org.apache.nutch.indexer.IndexingFilter;
Index: src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java
===================================================================
--- src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java	(revision 1169488)
+++ src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java	(working copy)
@@ -19,8 +19,8 @@
 
 package org.apache.nutch.urlfilter.prefix;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.*;
@@ -51,7 +51,7 @@
  */
 public class PrefixURLFilter implements URLFilter {
 
-  private static final Log LOG = LogFactory.getLog(PrefixURLFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(PrefixURLFilter.class);
 
   // read in attribute "file" of this plugin.
   private static String attributeFile = null;
@@ -162,7 +162,7 @@
       try {
         trie = readConfiguration(reader);
       } catch (IOException e) {
-        if (LOG.isFatalEnabled()) { LOG.fatal(e.getMessage()); }
+        if (LOG.isErrorEnabled()) { LOG.error(e.getMessage()); }
         // TODO mb@media-style.com: throw Exception? Because broken api.
         throw new RuntimeException(e.getMessage(), e);
       }
Index: src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
===================================================================
--- src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java	(revision 1169488)
+++ src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java	(working copy)
@@ -21,8 +21,8 @@
 import java.io.IOException;
 import java.util.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.protocols.Response;
@@ -47,7 +47,7 @@
  * @author Andrzej Bialecki
  */
 public class SWFParser implements Parser {
-  public static final Log LOG = LogFactory.getLog("org.apache.nutch.parse.swf");
+  public static final Logger LOG = LoggerFactory.getLogger("org.apache.nutch.parse.swf");
 
   private Configuration conf = null;
 
Index: src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
===================================================================
--- src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java	(revision 1169488)
+++ src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -43,7 +43,7 @@
  */
 public class URLMetaScoringFilter extends Configured implements ScoringFilter {
 
-  private static final Log LOG = LogFactory.getLog(URLMetaScoringFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(URLMetaScoringFilter.class);
   private static final String CONF_PROPERTY = "urlmeta.tags";
   private static String[] urlMetaTags;
   private Configuration conf;
Index: src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
===================================================================
--- src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java	(revision 1169488)
+++ src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.indexer.urlmeta;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -67,8 +67,8 @@
  */
 public class URLMetaIndexingFilter implements IndexingFilter {
 
-	private static final Log LOG = LogFactory
-			.getLog(URLMetaIndexingFilter.class);
+	private static final Logger LOG = LoggerFactory
+			.getLogger(URLMetaIndexingFilter.class);
 	private static final String CONF_PROPERTY = "urlmeta.tags";
 	private static String[] urlMetaTags;
 	private Configuration conf;
@@ -115,4 +115,4 @@
 
 		urlMetaTags = conf.getStrings(CONF_PROPERTY);
 	}
-}
\ No newline at end of file
+}
Index: src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java
===================================================================
--- src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java	(revision 1169488)
+++ src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java	(working copy)
@@ -26,8 +26,8 @@
 import org.apache.nutch.plugin.Extension;
 import org.apache.nutch.plugin.PluginRepository;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.Reader;
 import java.io.FileReader;
@@ -123,7 +123,7 @@
  */
 public class SuffixURLFilter implements URLFilter {
 
-  private static final Log LOG = LogFactory.getLog(SuffixURLFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SuffixURLFilter.class);
 
   // read in attribute "file" of this plugin.
   private String attributeFile = null;
@@ -283,7 +283,7 @@
     try {
       readConfiguration(reader);
     } catch (IOException e) {
-      if (LOG.isFatalEnabled()) { LOG.fatal(e.getMessage()); }
+      if (LOG.isErrorEnabled()) { LOG.error(e.getMessage()); }
       throw new RuntimeException(e.getMessage(), e);
     }
   }
Index: src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java
===================================================================
--- src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java	(revision 1169488)
+++ src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java	(working copy)
@@ -23,8 +23,8 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.protocols.Response;
@@ -45,7 +45,7 @@
  */
 public class ZipParser implements Parser {
 
-  private static final Log LOG = LogFactory.getLog(ZipParser.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ZipParser.class);
   private Configuration conf;
 
   /** Creates a new instance of ZipParser */
Index: src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipTextExtractor.java
===================================================================
--- src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipTextExtractor.java	(revision 1169488)
+++ src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipTextExtractor.java	(working copy)
@@ -26,8 +26,8 @@
 import java.net.URL;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -55,7 +55,7 @@
   /** Get the MimeTypes resolver instance. */
   private MimeUtil MIME;
   
-  public static final Log LOG = LogFactory.getLog(ZipTextExtractor.class);
+  public static final Logger LOG = LoggerFactory.getLogger(ZipTextExtractor.class);
 
   private Configuration conf;
   
Index: src/plugin/urlnormalizer-regex/src/test/org/apache/nutch/net/urlnormalizer/regex/TestRegexURLNormalizer.java
===================================================================
--- src/plugin/urlnormalizer-regex/src/test/org/apache/nutch/net/urlnormalizer/regex/TestRegexURLNormalizer.java	(revision 1169488)
+++ src/plugin/urlnormalizer-regex/src/test/org/apache/nutch/net/urlnormalizer/regex/TestRegexURLNormalizer.java	(working copy)
@@ -30,8 +30,8 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 /**
@@ -57,7 +57,7 @@
 
 /** Unit tests for RegexUrlNormalizer. */
 public class TestRegexURLNormalizer extends TestCase {
-  private static final Log LOG = LogFactory.getLog(TestRegexURLNormalizer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestRegexURLNormalizer.class);
   
   private RegexURLNormalizer normalizer;
   private Configuration conf;
Index: src/plugin/urlnormalizer-regex/src/java/org/apache/nutch/net/urlnormalizer/regex/RegexURLNormalizer.java
===================================================================
--- src/plugin/urlnormalizer-regex/src/java/org/apache/nutch/net/urlnormalizer/regex/RegexURLNormalizer.java	(revision 1169488)
+++ src/plugin/urlnormalizer-regex/src/java/org/apache/nutch/net/urlnormalizer/regex/RegexURLNormalizer.java	(working copy)
@@ -32,8 +32,8 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 
@@ -63,7 +63,7 @@
  */
 public class RegexURLNormalizer extends Configured implements URLNormalizer {
 
-  private static final Log LOG = LogFactory.getLog(RegexURLNormalizer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(RegexURLNormalizer.class);
 
   /**
    * Class which holds a compiled pattern and its corresponding substition
@@ -211,8 +211,8 @@
               .parse(new InputSource(reader));
       Element root = doc.getDocumentElement();
       if ((!"regex-normalize".equals(root.getTagName()))
-              && (LOG.isFatalEnabled())) {
-        LOG.fatal("bad conf file: top-level element not <regex-normalize>");
+              && (LOG.isErrorEnabled())) {
+        LOG.error("bad conf file: top-level element not <regex-normalize>");
       }
       NodeList regexes = root.getChildNodes();
       for (int i = 0; i < regexes.getLength(); i++) {
@@ -247,8 +247,8 @@
         }
       }
     } catch (Exception e) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal("error parsing conf file: " + e);
+      if (LOG.isErrorEnabled()) {
+        LOG.error("error parsing conf file: " + e);
       }
       return EMPTY_RULES;
     }
Index: src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/File.java
===================================================================
--- src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/File.java	(revision 1169488)
+++ src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/File.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.protocol.file;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.hadoop.io.Text;
@@ -48,7 +48,7 @@
  ***********************************/
 public class File implements Protocol {
 
-  public static final Log LOG = LogFactory.getLog(File.class);
+  public static final Logger LOG = LoggerFactory.getLogger(File.class);
 
   static final int MAX_REDIRECTS = 5;
 
Index: src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java
===================================================================
--- src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(revision 1169488)
+++ src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(working copy)
@@ -21,8 +21,8 @@
 import java.net.URL;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Nutch imports
 import org.apache.nutch.crawl.CrawlDatum;
@@ -78,10 +78,10 @@
   protected String acceptLanguage = "en-us,en-gb,en;q=0.7,*;q=0.3";
     
   /** The default logger */
-  private final static Log LOGGER = LogFactory.getLog(HttpBase.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(HttpBase.class);
 
   /** The specified logger */
-  private Log logger = LOGGER;
+  private Logger logger = LOGGER;
  
   /** The nutch configuration */
   private Configuration conf = null;
@@ -98,7 +98,7 @@
   }
   
   /** Creates a new instance of HttpBase */
-  public HttpBase(Log logger) {
+  public HttpBase(Logger logger) {
     if (logger != null) {
       this.logger = logger;
     }
@@ -247,8 +247,8 @@
     
     if ( (agentName == null) || (agentName.trim().length() == 0) ) {
       // TODO : NUTCH-258
-      if (LOGGER.isFatalEnabled()) {
-        LOGGER.fatal("No User-Agent string set (http.agent.name)!");
+      if (LOGGER.isErrorEnabled()) {
+        LOGGER.error("No User-Agent string set (http.agent.name)!");
       }
     }
     
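
The isErrorEnabled() guards are carried over verbatim to keep the diff small,
but under SLF4J a guard only pays off when building the message is itself
costly; the {} placeholder form defers formatting until the level is known to
be enabled. A sketch of both forms in a hypothetical AgentCheck helper (the
message text is taken from the hunk above; the two calls are shown together
only for comparison):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class AgentCheck {
      private static final Logger LOGGER = LoggerFactory.getLogger(AgentCheck.class);

      static void checkAgent(String agentName) {
        if (agentName == null || agentName.trim().length() == 0) {
          // guarded form, as in the patch:
          if (LOGGER.isErrorEnabled()) {
            LOGGER.error("No User-Agent string set (http.agent.name)!");
          }
          // equivalent parameterized form; the argument is only
          // formatted when ERROR is enabled:
          LOGGER.error("No User-Agent string set ({})!", "http.agent.name");
        }
      }
    }
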
Index: src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java
===================================================================
--- src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java	(revision 1169488)
+++ src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/RobotRulesParser.java	(working copy)
@@ -29,8 +29,8 @@
 import java.util.StringTokenizer;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 // Nutch imports
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configurable;
@@ -52,7 +52,7 @@
  */
 public class RobotRulesParser implements Configurable {
   
-  public static final Log LOG = LogFactory.getLog(RobotRulesParser.class);
+  public static final Logger LOG = LoggerFactory.getLogger(RobotRulesParser.class);
 
   private boolean allowForbidden = false;
 
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/PrintCommandListener.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/PrintCommandListener.java	(revision 1169488)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/PrintCommandListener.java	(working copy)
@@ -21,7 +21,7 @@
 import java.io.StringReader;
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
+import org.slf4j.Logger;
 
 import org.apache.commons.net.ProtocolCommandEvent;
 import org.apache.commons.net.ProtocolCommandListener;
@@ -33,9 +33,9 @@
  ***/
 public class PrintCommandListener implements ProtocolCommandListener
 {
-    private Log __logger;
+    private Logger __logger;
 
-    public PrintCommandListener(Log logger)
+    public PrintCommandListener(Logger logger)
     {
         __logger = logger;
     }
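
With the constructor now taking an org.slf4j.Logger, callers change only their
import. A sketch of how a protocol plugin might wire the listener into a
commons-net FTPClient (addProtocolCommandListener() is commons-net API; the
FtpWiring class itself is hypothetical):

    import org.apache.commons.net.ftp.FTPClient;
    import org.apache.nutch.protocol.ftp.PrintCommandListener;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class FtpWiring {
      private static final Logger LOG = LoggerFactory.getLogger(FtpWiring.class);

      public static void main(String[] args) {
        FTPClient client = new FTPClient();
        // previously this took a commons-logging Log; now an SLF4J Logger
        client.addProtocolCommandListener(new PrintCommandListener(LOG));
      }
    }
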
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(revision 1169488)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(working copy)
@@ -17,8 +17,8 @@
 
 package org.apache.nutch.protocol.ftp;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.commons.net.ftp.FTPFileEntryParser;
 
@@ -50,7 +50,7 @@
  ***********************************/
 public class Ftp implements Protocol {
 
-  public static final Log LOG = LogFactory.getLog(Ftp.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Ftp.class);
 
   static final int BUFFER_SIZE = 16384; // 16*1024 = 16384
 
Index: src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCIndexingFilter.java
===================================================================
--- src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCIndexingFilter.java	(revision 1169488)
+++ src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCIndexingFilter.java	(working copy)
@@ -32,8 +32,8 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.*;
 import java.net.URL;
@@ -41,7 +41,7 @@
 
 /** Adds basic searchable fields to a document. */
 public class CCIndexingFilter implements IndexingFilter {
-  public static final Log LOG = LogFactory.getLog(CCIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CCIndexingFilter.class);
 
   /** The name of the document field we use. */
   public static String FIELD = "cc";
Index: src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java
===================================================================
--- src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java	(revision 1169488)
+++ src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java	(working copy)
@@ -24,8 +24,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.*;
 import java.io.*;
@@ -37,7 +37,7 @@
 
 /** Adds metadata identifying the Creative Commons license used, if any. */
 public class CCParseFilter implements HtmlParseFilter {
-  public static final Log LOG = LogFactory.getLog(CCParseFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(CCParseFilter.class);
 
 
   /** Walks DOM tree, looking for RDF in comments and licenses in anchors.*/
Index: src/plugin/parse-tika/src/test/org/apache/nutch/tika/TestFeedParser.java
===================================================================
--- src/plugin/parse-tika/src/test/org/apache/nutch/tika/TestFeedParser.java	(revision 1169488)
+++ src/plugin/parse-tika/src/test/org/apache/nutch/tika/TestFeedParser.java	(working copy)
@@ -19,8 +19,8 @@
 
 import junit.framework.TestCase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -52,7 +52,6 @@
 
 	private String[] sampleFiles = { "rsstest.rss" };
 
-	public static final Log LOG = LogFactory.getLog(TestFeedParser.class
-			.getName());
+	public static final Logger LOG = LoggerFactory.getLogger(TestFeedParser.class);
 
 	/**
Index: src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
===================================================================
--- src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java	(revision 1169488)
+++ src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java	(working copy)
@@ -23,8 +23,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.html.dom.HTMLDocumentImpl;
 import org.apache.nutch.metadata.Nutch;
@@ -50,7 +50,7 @@
 
 public class TikaParser implements org.apache.nutch.parse.Parser {
 
-	public static final Log LOG = LogFactory.getLog(TikaParser.class);
+	public static final Logger LOG = LoggerFactory.getLogger(TikaParser.class);
 
 	private Configuration conf;
 	private TikaConfig tikaConfig = null;
Index: src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java
===================================================================
--- src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java	(revision 1169488)
+++ src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java	(working copy)
@@ -28,8 +28,8 @@
 import junit.framework.TestCase;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Nutch imports
 import org.apache.nutch.net.URLFilter;
@@ -43,7 +43,7 @@
 public abstract class RegexURLFilterBaseTest extends TestCase {
   
   /** My logger */
-  protected static final Log LOG = LogFactory.getLog(RegexURLFilterBaseTest.class);  
+  protected static final Logger LOG = LoggerFactory.getLogger(RegexURLFilterBaseTest.class);  
 
   private final static String SEPARATOR = System.getProperty("file.separator");  
   private final static String SAMPLES = System.getProperty("test.data", ".");
Index: src/plugin/lib-regex-filter/src/java/org/apache/nutch/urlfilter/api/RegexURLFilterBase.java
===================================================================
--- src/plugin/lib-regex-filter/src/java/org/apache/nutch/urlfilter/api/RegexURLFilterBase.java	(revision 1169488)
+++ src/plugin/lib-regex-filter/src/java/org/apache/nutch/urlfilter/api/RegexURLFilterBase.java	(working copy)
@@ -28,8 +28,8 @@
 import java.util.ArrayList;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -58,7 +58,7 @@
 public abstract class RegexURLFilterBase implements URLFilter {
 
   /** My logger */
-  private final static Log LOG = LogFactory.getLog(RegexURLFilterBase.class);
+  private final static Logger LOG = LoggerFactory.getLogger(RegexURLFilterBase.class);
 
   /** An array of applicable rules */
   private RegexRule[] rules;
Index: src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
===================================================================
--- src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java	(revision 1169488)
+++ src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java	(working copy)
@@ -22,8 +22,8 @@
 import java.util.Map;
 
 // APACHE imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -59,7 +59,6 @@
 
   private String[] sampleFiles = { "rsstest.rss" };
 
-  public static final Log LOG = LogFactory.getLog(TestFeedParser.class
-      .getName());
+  public static final Logger LOG = LoggerFactory.getLogger(TestFeedParser.class);
 
   /**
Index: src/plugin/feed/src/java/org/apache/nutch/parse/feed/FeedParser.java
===================================================================
--- src/plugin/feed/src/java/org/apache/nutch/parse/feed/FeedParser.java	(revision 1169488)
+++ src/plugin/feed/src/java/org/apache/nutch/parse/feed/FeedParser.java	(working copy)
@@ -27,11 +27,12 @@
 import java.util.Map.Entry;
 
 // APACHE imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.StringUtils;
+// import org.apache.nutch.indexer.anchor.AnchorIndexingFilter; removed as per NUTCH-1078
 import org.apache.nutch.metadata.Feed;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.URLFilters;
@@ -78,8 +79,7 @@
   public static final String TEXT_PLAIN_CONTENT_TYPE = "text/plain; "
       + CHARSET_UTF8;
 
-  public static final Log LOG = LogFactory
-      .getLog("org.apache.nutch.parse.feed");
+  public static final Logger LOG = LoggerFactory.getLogger(FeedParser.class);
 
   private Configuration conf;
 
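
Unlike parse-swf above (and parse-ext and parse-html later in this patch),
which keep their string-based logger names, this hunk also renames the feed
logger from the package name "org.apache.nutch.parse.feed" to the class-based
form. Since getLogger(Class) is shorthand for getLogger(clazz.getName()), the
effective logger name becomes the fully qualified class name, so any log4j
category configured against the old package-level name must be adjusted. A
small sketch of the equivalence:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggerNames {
      public static void main(String[] args) {
        // getLogger(Class) uses the fully qualified class name:
        Logger byClass = LoggerFactory.getLogger(LoggerNames.class);
        Logger byName = LoggerFactory.getLogger(LoggerNames.class.getName());
        // prints "true": both resolve to the same logger name
        System.out.println(byClass.getName().equals(byName.getName()));
      }
    }
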
Index: src/plugin/subcollection/src/java/org/apache/nutch/collection/CollectionManager.java
===================================================================
--- src/plugin/subcollection/src/java/org/apache/nutch/collection/CollectionManager.java	(revision 1169488)
+++ src/plugin/subcollection/src/java/org/apache/nutch/collection/CollectionManager.java	(working copy)
@@ -30,8 +30,8 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -48,7 +48,7 @@
 
   public static final String DEFAULT_FILE_NAME = "subcollections.xml";
 
-  static final Log LOG = LogFactory.getLog(CollectionManager.class);
+  static final Logger LOG = LoggerFactory.getLogger(CollectionManager.class);
 
   transient Map collectionMap = new HashMap();
 
Index: src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
===================================================================
--- src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java	(revision 1169488)
+++ src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java	(working copy)
@@ -20,8 +20,8 @@
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.parse.Parse;
 import org.apache.nutch.util.NutchConfiguration;
@@ -53,7 +53,7 @@
   /**
    * Logger
    */
-  public static final Log LOG = LogFactory.getLog(SubcollectionIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(SubcollectionIndexingFilter.class);
 
   /**
    * "Mark" document to be a part of subcollection
Index: src/plugin/index-more/src/java/org/apache/nutch/indexer/more/MoreIndexingFilter.java
===================================================================
--- src/plugin/index-more/src/java/org/apache/nutch/indexer/more/MoreIndexingFilter.java	(revision 1169488)
+++ src/plugin/index-more/src/java/org/apache/nutch/indexer/more/MoreIndexingFilter.java	(working copy)
@@ -25,8 +25,8 @@
 import org.apache.oro.text.regex.MalformedPatternException;
 import org.apache.tika.mime.MimeType;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.metadata.Metadata;
 
@@ -69,7 +69,7 @@
  */
 
 public class MoreIndexingFilter implements IndexingFilter {
-  public static final Log LOG = LogFactory.getLog(MoreIndexingFilter.class);
+  public static final Logger LOG = LoggerFactory.getLogger(MoreIndexingFilter.class);
 
   /** A flag that tells if magic resolution must be performed */
   private boolean MAGIC;
Index: src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java
===================================================================
--- src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java	(revision 1169488)
+++ src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java	(working copy)
@@ -35,8 +35,8 @@
 import org.apache.nutch.plugin.Extension;
 import org.apache.nutch.plugin.PluginRepository;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Hashtable;
 
@@ -52,7 +52,7 @@
 
 public class ExtParser implements Parser {
 
-  public static final Log LOG = LogFactory.getLog("org.apache.nutch.parse.ext");
+  public static final Logger LOG = LoggerFactory.getLogger("org.apache.nutch.parse.ext");
 
   static final int BUFFER_SIZE = 4096;
 
Index: src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
===================================================================
--- src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java	(revision 1169488)
+++ src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java	(working copy)
@@ -31,8 +31,8 @@
 import org.w3c.dom.*;
 import org.apache.html.dom.*;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.metadata.Nutch;
@@ -42,7 +42,7 @@
 import org.apache.nutch.util.*;
 
 public class HtmlParser implements Parser {
-  public static final Log LOG = LogFactory.getLog("org.apache.nutch.parse.html");
+  public static final Logger LOG = LoggerFactory.getLogger("org.apache.nutch.parse.html");
 
   // I used 1000 bytes at first, but  found that some documents have 
   // meta tag well past the first 1000 bytes. 
Index: src/plugin/urlfilter-domain/src/test/org/apache/nutch/urlfilter/domain/TestDomainURLFilter.java
===================================================================
--- src/plugin/urlfilter-domain/src/test/org/apache/nutch/urlfilter/domain/TestDomainURLFilter.java	(revision 1169488)
+++ src/plugin/urlfilter-domain/src/test/org/apache/nutch/urlfilter/domain/TestDomainURLFilter.java	(working copy)
@@ -18,15 +18,15 @@
 
 import junit.framework.TestCase;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.util.NutchConfiguration;
 
 public class TestDomainURLFilter
   extends TestCase {
 
-  protected static final Log LOG = LogFactory.getLog(TestDomainURLFilter.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(TestDomainURLFilter.class);
 
   private final static String SEPARATOR = System.getProperty("file.separator");
   private final static String SAMPLES = System.getProperty("test.data", ".");
Index: src/plugin/urlfilter-domain/src/java/org/apache/nutch/urlfilter/domain/DomainURLFilter.java
===================================================================
--- src/plugin/urlfilter-domain/src/java/org/apache/nutch/urlfilter/domain/DomainURLFilter.java	(revision 1169488)
+++ src/plugin/urlfilter-domain/src/java/org/apache/nutch/urlfilter/domain/DomainURLFilter.java	(working copy)
@@ -25,8 +25,8 @@
 import java.util.Set;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLFilter;
 import org.apache.nutch.plugin.Extension;
@@ -63,7 +63,7 @@
 public class DomainURLFilter
   implements URLFilter {
 
-  private static final Log LOG = LogFactory.getLog(DomainURLFilter.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DomainURLFilter.class);
 
   // read in attribute "file" of this plugin.
   private static String attributeFile = null;
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java	(working copy)
@@ -30,8 +30,8 @@
 import org.w3c.dom.Node;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // HTTP Client imports
 import org.apache.commons.httpclient.Header;
@@ -62,7 +62,7 @@
  */
 public class Http extends HttpBase {
 
-  public static final Log LOG = LogFactory.getLog(Http.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Http.class);
 
   private static MultiThreadedHttpConnectionManager connectionManager =
           new MultiThreadedHttpConnectionManager();
@@ -121,8 +121,8 @@
     try {
       setCredentials();
     } catch (Exception ex) {
-      if (LOG.isFatalEnabled()) {
-        LOG.fatal("Could not read " + authFile + " : " + ex.getMessage());
+      if (LOG.isErrorEnabled()) {
+        LOG.error("Could not read " + authFile + " : " + ex.getMessage());
         ex.printStackTrace(LogUtil.getErrorStream(LOG));
       }
     }
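
The LogUtil.getErrorStream() bridge survives in this hunk, but SLF4J can emit
a stack trace directly when the Throwable is passed as the final argument,
which makes the PrintStream indirection unnecessary. A sketch of the more
idiomatic form (CredentialDemo and setCredentials() are stand-ins, not the
plugin's code):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CredentialDemo {
      private static final Logger LOG = LoggerFactory.getLogger(CredentialDemo.class);

      void configure(String authFile) {
        try {
          setCredentials();
        } catch (Exception ex) {
          // message plus throwable: SLF4J logs the stack trace itself,
          // so no ex.printStackTrace(LogUtil.getErrorStream(LOG)) is needed
          LOG.error("Could not read " + authFile, ex);
        }
      }

      // stand-in for the real credential loading
      private void setCredentials() throws Exception {
      }
    }
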
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummySSLProtocolSocketFactory.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummySSLProtocolSocketFactory.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummySSLProtocolSocketFactory.java	(working copy)
@@ -35,16 +35,16 @@
 import org.apache.commons.httpclient.params.HttpConnectionParams;
 import org.apache.commons.httpclient.protocol.ControllerThreadSocketFactory;
 import org.apache.commons.httpclient.protocol.SecureProtocolSocketFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.net.ssl.SSLContext;
 import javax.net.ssl.TrustManager;
 
 public class DummySSLProtocolSocketFactory implements SecureProtocolSocketFactory {
 
-  /** Log object for this class. */
-  private static final Log LOG = LogFactory.getLog(DummySSLProtocolSocketFactory.class);
+  /** Logger object for this class. */
+  private static final Logger LOG = LoggerFactory.getLogger(DummySSLProtocolSocketFactory.class);
 
   private SSLContext sslcontext = null;
 
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java	(working copy)
@@ -28,8 +28,8 @@
 import org.apache.commons.codec.binary.Base64;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -46,7 +46,7 @@
  */
 public class HttpBasicAuthentication implements HttpAuthentication, Configurable {
 
-    public static final Log LOG = LogFactory.getLog(HttpBasicAuthentication.class);
+    public static final Logger LOG = LoggerFactory.getLogger(HttpBasicAuthentication.class);
 
     private static Pattern basic = Pattern.compile("[bB][aA][sS][iI][cC] [rR][eE][aA][lL][mM]=\"(\\w*)\"");
 	
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpAuthenticationFactory.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpAuthenticationFactory.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpAuthenticationFactory.java	(working copy)
@@ -24,8 +24,8 @@
 import java.util.TreeMap;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -56,7 +56,7 @@
      */
     public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
 	
-    public static final Log LOG = LogFactory.getLog(HttpAuthenticationFactory.class);
+    public static final Logger LOG = LoggerFactory.getLogger(HttpAuthenticationFactory.class);
 
     private static Map auths = new TreeMap(); 
 
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java	(working copy)
@@ -29,15 +29,15 @@
 import javax.net.ssl.TrustManagerFactory;
 import javax.net.ssl.TrustManager;
 import javax.net.ssl.X509TrustManager;
-import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger; 
+import org.slf4j.LoggerFactory;
 
 public class DummyX509TrustManager implements X509TrustManager
 {
     private X509TrustManager standardTrustManager = null;
 
-    /** Log object for this class. */
-    private static final Log LOG = LogFactory.getLog(DummyX509TrustManager.class);
+    /** Logger object for this class. */
+    private static final Logger LOG = LoggerFactory.getLogger(DummyX509TrustManager.class);
 
     /**
      * Constructor for DummyX509TrustManager.
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpResponse.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpResponse.java	(revision 1169488)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpResponse.java	(working copy)
@@ -170,9 +170,9 @@
         }
       }
 
       // Log trace message
       if (Http.LOG.isTraceEnabled()) {
-        Http.LOG.trace(fetchTrace);
+        Http.LOG.trace(fetchTrace.toString());
       }
     } finally {
       get.releaseConnection();
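
The added toString() call is needed because commons-logging's trace(Object)
accepted any object, while org.slf4j.Logger.trace(String) does not; passing
the buffer through a {} placeholder achieves the same result and converts it
lazily. A sketch, assuming fetchTrace is a StringBuffer as in this class:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class TraceDemo {
      private static final Logger LOG = LoggerFactory.getLogger(TraceDemo.class);

      public static void main(String[] args) {
        StringBuffer fetchTrace = new StringBuffer("url: http://example.com/, status: 200");
        // as in the patch: explicit conversion, guarded
        if (LOG.isTraceEnabled()) {
          LOG.trace(fetchTrace.toString());
        }
        // equivalent: {} calls toString() only when TRACE is enabled
        LOG.trace("{}", fetchTrace);
      }
    }
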
Index: src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java
===================================================================
--- src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java	(revision 1169488)
+++ src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java	(working copy)
@@ -21,8 +21,8 @@
 import java.net.URL;
 
 // Commons Logging imports
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
@@ -37,7 +37,7 @@
 
 public class Http extends HttpBase {
 
-  public static final Log LOG = LogFactory.getLog(Http.class);
+  public static final Logger LOG = LoggerFactory.getLogger(Http.class);
 
 
   public Http() {
Index: src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/HttpResponse.java
===================================================================
--- src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/HttpResponse.java	(revision 1169488)
+++ src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/HttpResponse.java	(working copy)
@@ -117,7 +117,7 @@
 
       String userAgent = http.getUserAgent();
       if ((userAgent == null) || (userAgent.length() == 0)) {
-        if (Http.LOG.isFatalEnabled()) { Http.LOG.fatal("User-agent is not set!"); }
+        if (Http.LOG.isErrorEnabled()) { Http.LOG.error("User-agent is not set!"); }
       } else {
         reqStr.append("User-Agent: ");
         reqStr.append(userAgent);
