Index: src/java/org/apache/nutch/fetcher/FetcherOutputFormat.java
===================================================================
--- src/java/org/apache/nutch/fetcher/FetcherOutputFormat.java	(revision 417577)
+++ src/java/org/apache/nutch/fetcher/FetcherOutputFormat.java	(working copy)
@@ -31,6 +31,7 @@
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.Progressable;
 
 import org.apache.nutch.parse.ParseOutputFormat;
 import org.apache.nutch.protocol.Content;
@@ -45,7 +46,8 @@
 
   public RecordWriter getRecordWriter(final FileSystem fs,
                                       final JobConf job,
-                                      final String name) throws IOException {
+                                      final String name,
+                                      final Progressable progress) throws IOException {
 
     final Path fetch =
       new Path(new Path(job.getOutputPath(), CrawlDatum.FETCH_DIR_NAME), name);
@@ -66,7 +68,7 @@
           }
 
           if (Fetcher.isParsing(job)) {
-            parseOut = new ParseOutputFormat().getRecordWriter(fs, job, name);
+            parseOut = new ParseOutputFormat().getRecordWriter(fs, job, name, progress);
           }
         }
 
Index: src/java/org/apache/nutch/indexer/DeleteDuplicates.java
===================================================================
--- src/java/org/apache/nutch/indexer/DeleteDuplicates.java	(revision 417577)
+++ src/java/org/apache/nutch/indexer/DeleteDuplicates.java	(working copy)
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.Progressable;
 
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.util.NutchJob;
@@ -276,7 +277,8 @@
   /** Write nothing. */
   public RecordWriter getRecordWriter(final FileSystem fs,
                                       final JobConf job,
-                                      final String name) throws IOException {
+                                      final String name,
+                                      final Progressable progress) throws IOException {
     return new RecordWriter() {                   
         public void write(WritableComparable key, Writable value)
           throws IOException {
Index: src/java/org/apache/nutch/indexer/Indexer.java
===================================================================
--- src/java/org/apache/nutch/indexer/Indexer.java	(revision 417577)
+++ src/java/org/apache/nutch/indexer/Indexer.java	(working copy)
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.Progressable;
 import org.apache.nutch.parse.*;
 import org.apache.nutch.analysis.*;
 
@@ -79,7 +80,7 @@
   public static class OutputFormat
     extends org.apache.hadoop.mapred.OutputFormatBase {
     public RecordWriter getRecordWriter(final FileSystem fs, JobConf job,
-                                        String name) throws IOException {
+                                        String name, Progressable progress) throws IOException {
       final Path perm = new Path(job.getOutputPath(), name);
       final Path temp =
         job.getLocalPath("index/_"+Integer.toString(new Random().nextInt()));
Index: src/java/org/apache/nutch/segment/SegmentMerger.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMerger.java	(revision 417577)
+++ src/java/org/apache/nutch/segment/SegmentMerger.java	(working copy)
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.Progressable;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.Generator;
 import org.apache.nutch.fetcher.Fetcher;
@@ -168,7 +169,7 @@
   public static class SegmentOutputFormat extends org.apache.hadoop.mapred.OutputFormatBase {
     private static final String DEFAULT_SLICE = "default";
     
-    public RecordWriter getRecordWriter(final FileSystem fs, final JobConf job, final String name) throws IOException {
+    public RecordWriter getRecordWriter(final FileSystem fs, final JobConf job, final String name, final Progressable progress) throws IOException {
       return new RecordWriter() {
         MapFile.Writer c_out = null;
         MapFile.Writer f_out = null;
Index: src/java/org/apache/nutch/segment/SegmentReader.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentReader.java	(revision 417577)
+++ src/java/org/apache/nutch/segment/SegmentReader.java	(working copy)
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.util.Progressable;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.parse.ParseData;
 import org.apache.nutch.parse.ParseText;
@@ -70,7 +71,7 @@
 
   /** Implements a text output format */
   public static class TextOutputFormat extends org.apache.hadoop.mapred.OutputFormatBase {
-    public RecordWriter getRecordWriter(final FileSystem fs, JobConf job, String name) throws IOException {
+    public RecordWriter getRecordWriter(final FileSystem fs, JobConf job, String name, Progressable progress) throws IOException {
 
       final Path segmentDumpFile = new Path(job.getOutputPath(), name);
 
Index: src/java/org/apache/nutch/parse/ParseOutputFormat.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseOutputFormat.java	(revision 417577)
+++ src/java/org/apache/nutch/parse/ParseOutputFormat.java	(working copy)
@@ -31,6 +31,7 @@
 import org.apache.nutch.net.*;
 
 import java.io.*;
+import org.apache.hadoop.util.Progressable;
 
 /* Parse content in a segment. */
 public class ParseOutputFormat implements OutputFormat {
@@ -46,7 +47,7 @@
   }
 
   public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
-                                      String name) throws IOException {
+                                      String name, Progressable progress) throws IOException {
 
     this.urlNormalizer = new UrlNormalizerFactory(job).getNormalizer();
     this.filters = new URLFilters(job);
