Index: src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java
===================================================================
--- src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java	(revision 1480500)
+++ src/plugin/lib-regex-filter/src/test/org/apache/nutch/urlfilter/api/RegexURLFilterBaseTest.java	(working copy)
@@ -109,7 +109,7 @@
   
   private static FilteredURL[] readURLFile(Reader reader) throws IOException {
     BufferedReader in = new BufferedReader(reader);
-    List list = new ArrayList();
+    List<FilteredURL> list = new ArrayList<FilteredURL>();
     String line;
     while((line=in.readLine()) != null) {
       if (line.length() != 0) {
Index: src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java
===================================================================
--- src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java	(revision 1480500)
+++ src/plugin/parse-ext/src/java/org/apache/nutch/parse/ext/ExtParser.java	(working copy)
@@ -21,14 +21,12 @@
 import org.apache.nutch.parse.ParseResult;
 import org.apache.nutch.parse.ParseStatus;
 import org.apache.nutch.parse.Parser;
-import org.apache.nutch.parse.Parse;
 import org.apache.nutch.parse.ParseData;
 import org.apache.nutch.parse.ParseImpl;
 import org.apache.nutch.parse.Outlink;
 import org.apache.nutch.parse.OutlinkExtractor;
 
 import org.apache.nutch.util.CommandRunner;
-import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.protocols.Response;
 import org.apache.hadoop.conf.Configuration;
 
@@ -59,12 +57,10 @@
   static final int TIMEOUT_DEFAULT = 30; // in seconds
 
   // handy map from String contentType to String[] {command, timeoutString, encoding}
-  Hashtable TYPE_PARAMS_MAP = new Hashtable();
+  Hashtable<String, String[]> TYPE_PARAMS_MAP = new Hashtable<String, String[]>();
 
   private Configuration conf;  
 
-  private boolean loaded = false;
-
   public ExtParser () { }
 
   public ParseResult getParse(Content content) {
Index: src/plugin/parse-html/src/java/org/apache/nutch/parse/html/DOMBuilder.java
===================================================================
--- src/plugin/parse-html/src/java/org/apache/nutch/parse/html/DOMBuilder.java	(revision 1480500)
+++ src/plugin/parse-html/src/java/org/apache/nutch/parse/html/DOMBuilder.java	(working copy)
@@ -58,7 +58,7 @@
   public DocumentFragment m_docFrag = null;
 
   /** Vector of element nodes          */
-  protected Stack m_elemStack = new Stack();
+  protected Stack<Element> m_elemStack = new Stack<Element>();
 
   /**
    * DOMBuilder instance constructor... it will add the DOM nodes
Index: src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java
===================================================================
--- src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java	(revision 1480500)
+++ src/plugin/urlfilter-prefix/src/java/org/apache/nutch/urlfilter/prefix/PrefixURLFilter.java	(working copy)
@@ -15,8 +15,6 @@
  * limitations under the License.
  */
 
-// $Id$
-
 package org.apache.nutch.urlfilter.prefix;
 
 import org.slf4j.Logger;
@@ -32,7 +30,6 @@
 import org.apache.nutch.plugin.PluginRepository;
 
 import java.io.Reader;
-import java.io.FileReader;
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
 import java.io.IOException;
@@ -79,7 +76,7 @@
     throws IOException {
     
     BufferedReader in=new BufferedReader(reader);
-    List urlprefixes = new ArrayList();
+    List<String> urlprefixes = new ArrayList<String>();
     String line;
 
     while((line=in.readLine())!=null) {
@@ -91,7 +88,7 @@
       case ' ' : case '\n' : case '#' :           // skip blank & comment lines
         continue;
       default :
-	urlprefixes.add(line);
+        urlprefixes.add(line);
       }
     }
 
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java	(revision 1480500)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java	(working copy)
@@ -17,11 +17,9 @@
 
 package org.apache.nutch.protocol.ftp;
 
-
 import org.apache.commons.net.ftp.FTP;
 import org.apache.commons.net.ftp.FTPFile;
 import org.apache.commons.net.ftp.FTPReply;
-
 import org.apache.commons.net.ftp.parser.DefaultFTPFileEntryParserFactory;
 import org.apache.commons.net.ftp.parser.ParserInitializationException;
 
@@ -42,8 +40,7 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 
-
-/************************************
+/**
  * FtpResponse.java mimics ftp replies as http response.
  * It tries its best to follow http's way for headers, response codes
  * as well as exceptions.
@@ -53,9 +50,7 @@
  * and some important commons-net exceptions passed by Client.java
  * must have been properly dealt with. They'd better not be leaked
  * to the caller of this class.
- *
- * @author John Xing
- ***********************************/
+ */
 public class FtpResponse {
 
   private String orig;
@@ -146,7 +141,7 @@
         // follow ftp talk?
         if (ftp.followTalk)
           ftp.client.addProtocolCommandListener(
-            new PrintCommandListener(ftp.LOG));
+            new PrintCommandListener(Ftp.LOG));
       }
 
       // quit from previous site if at a different site now
@@ -284,8 +279,8 @@
       }
       
     } catch (Exception e) {
-      if (ftp.LOG.isWarnEnabled()) {
-        ftp.LOG.warn("Error: ", e);
+      if (Ftp.LOG.isWarnEnabled()) {
+        Ftp.LOG.warn("Error: ", e);
       }
       // for any un-foreseen exception (run time exception or not),
       // do ultimate clean and leave ftp.client for garbage collection
@@ -312,11 +307,11 @@
     throws IOException {
 
     ByteArrayOutputStream os = null;
-    List list = null;
+    List<FTPFile> list = null;
 
     try {
       // first get its possible attributes
-      list = new LinkedList();
+      list = new LinkedList<FTPFile>();
       ftp.client.retrieveList(path, list, ftp.maxContentLength, ftp.parser);
 
       FTPFile ftpFile = (FTPFile) list.get(0);
@@ -329,7 +324,7 @@
         code = 304;
         return;
       }
-      os = new ByteArrayOutputStream(ftp.BUFFER_SIZE);
+      os = new ByteArrayOutputStream(ftp.getBufferSize());
       ftp.client.retrieveFile(path, os, ftp.maxContentLength);
 
       this.content = os.toByteArray();
@@ -414,7 +409,7 @@
   // get ftp dir list as http response
   private void getDirAsHttpResponse(String path, long lastModified)
     throws IOException {
-    List list = new LinkedList();
+    List<FTPFile> list = new LinkedList<FTPFile>();
 
     try {
 
@@ -482,7 +477,7 @@
   }
 
   // generate html page from ftp dir list
-  private byte[] list2html(List list, String path, boolean includeDotDot) {
+  private byte[] list2html(List<FTPFile> list, String path, boolean includeDotDot) {
 
     //StringBuffer x = new StringBuffer("<!doctype html public \"-//ietf//dtd html//en\"><html><head>");
     StringBuffer x = new StringBuffer("<html><head>");
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Client.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Client.java	(revision 1480500)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Client.java	(working copy)
@@ -77,9 +77,9 @@
     private int __dataTimeout;
     private int __passivePort;
     private String __passiveHost;
-    private int __fileType, __fileFormat;
+//    private int __fileType, __fileFormat;
     private boolean __remoteVerificationEnabled;
-    private FTPFileEntryParser __entryParser;
+//    private FTPFileEntryParser __entryParser;
     private String __systemName;
 
     // constructor
@@ -95,10 +95,10 @@
     {
         __passiveHost        = null;
         __passivePort        = -1;
-        __fileType           = FTP.ASCII_FILE_TYPE;
-        __fileFormat         = FTP.NON_PRINT_TEXT_FORMAT;
         __systemName         = null;
-        __entryParser        = null;
+//        __fileType           = FTP.ASCII_FILE_TYPE;
+//        __fileFormat         = FTP.NON_PRINT_TEXT_FORMAT;
+//        __entryParser        = null;
     }
 
     // parse reply for pass()
@@ -315,7 +315,7 @@
     }
 
     // retrieve list reply for path
-    public void retrieveList(String path, List entries, int limit,
+    public void retrieveList(String path, List<FTPFile> entries, int limit,
       FTPFileEntryParser parser)
       throws IOException,
         FtpExceptionCanNotHaveDataConnection,
@@ -331,7 +331,7 @@
           new BufferedReader(new InputStreamReader(socket.getInputStream()));
 
       // force-close data channel socket, when download limit is reached
-      boolean mandatory_close = false;
+//      boolean mandatory_close = false;
 
       //List entries = new LinkedList();
       int count = 0;
@@ -348,7 +348,7 @@
         // impose download limit if limit >= 0, otherwise no limit
         // here, cut off is up to the line when total bytes is just over limit
         if (limit >= 0 && count > limit) {
-          mandatory_close = true;
+//          mandatory_close = true;
           break;
         }
         line = parser.readNextEntry(reader);
@@ -403,7 +403,7 @@
       // fixme, should we instruct server here for binary file type?
 
       // force-close data channel socket
-      boolean mandatory_close = false;
+      // boolean mandatory_close = false;
 
       int len; int count = 0;
       byte[] buf =
@@ -414,7 +414,7 @@
         // here, cut off is exactly of limit bytes
         if (limit >= 0 && count > limit) {
           os.write(buf,0,len-(count-limit));
-          mandatory_close = true;
+          // mandatory_close = true;
           break;
         }
         os.write(buf,0,len);
@@ -502,8 +502,8 @@
     {
         if (FTPReply.isPositiveCompletion(type(fileType)))
         {
-            __fileType = fileType;
-            __fileFormat = FTP.NON_PRINT_TEXT_FORMAT;
+/*            __fileType = fileType;
+            __fileFormat = FTP.NON_PRINT_TEXT_FORMAT;*/
             return true;
         }
         return false;
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(revision 1480500)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(working copy)
@@ -55,7 +55,7 @@
 
   public static final Logger LOG = LoggerFactory.getLogger(Ftp.class);
 
-  static final int BUFFER_SIZE = 16384; // 16*1024 = 16384
+  private static final int BUFFER_SIZE = 16384; // 16*1024 = 16384
 
   static final int MAX_REDIRECTS = 5;
 
@@ -257,5 +257,9 @@
   public BaseRobotRules getRobotRules(Text url, CrawlDatum datum) {
     return RobotRulesParser.EMPTY_RULES;
   }
+
+  public int getBufferSize() {
+    return BUFFER_SIZE;
+  }
 }
 
Index: src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java
===================================================================
--- src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java	(revision 1480500)
+++ src/plugin/creativecommons/src/java/org/creativecommons/nutch/CCParseFilter.java	(working copy)
@@ -22,7 +22,6 @@
 import org.apache.nutch.protocol.Content;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -217,11 +216,6 @@
           if (!CC_NS.equals(predicateElement.getNamespaceURI())) {
             continue;
           }
-          String predicate = predicateElement.getLocalName();
-
-          // object is rdf:resource from cc:xxx predicates
-          String object =
-            predicateElement.getAttributeNodeNS(RDF_NS, "resource").getValue();
         
           // add object and predicate to metadata
           // metadata.put(object, predicate);
@@ -234,22 +228,19 @@
       // get cc:Work nodes from rdf:RDF
       NodeList works = rdf.getElementsByTagNameNS(CC_NS, "Work");
       for (int i = 0; i < works.getLength(); i++) {
-        Element l = (Element)works.item(i);
-        
         // get dc:type nodes from cc:Work
         NodeList types = rdf.getElementsByTagNameNS(DC_NS, "type");
+        
         for (int j = 0; j < types.getLength(); j++) {
           Element type = (Element)types.item(j);
-          String workUri = 
-            type.getAttributeNodeNS(RDF_NS, "resource").getValue();
-          this.workType = (String)WORK_TYPE_NAMES.get(workUri);
-          break;
+          String workUri = type.getAttributeNodeNS(RDF_NS, "resource").getValue();
+          this.workType = WORK_TYPE_NAMES.get(workUri);
         }
       }
     }
   }
 
-  private static final HashMap WORK_TYPE_NAMES = new HashMap();
+  private static final HashMap<String, String> WORK_TYPE_NAMES = new HashMap<String, String>();
   static {
     WORK_TYPE_NAMES.put("http://purl.org/dc/dcmitype/MovingImage", "video");
     WORK_TYPE_NAMES.put("http://purl.org/dc/dcmitype/StillImage", "image");
Index: src/plugin/protocol-httpclient/src/test/org/apache/nutch/protocol/httpclient/TestProtocolHttpClient.java
===================================================================
--- src/plugin/protocol-httpclient/src/test/org/apache/nutch/protocol/httpclient/TestProtocolHttpClient.java	(revision 1480500)
+++ src/plugin/protocol-httpclient/src/test/org/apache/nutch/protocol/httpclient/TestProtocolHttpClient.java	(working copy)
@@ -23,7 +23,6 @@
 import org.mortbay.jetty.Server;
 import org.mortbay.jetty.bio.SocketConnector;
 import org.mortbay.jetty.handler.ContextHandler;
-import org.mortbay.jetty.handler.ResourceHandler;
 import org.mortbay.jetty.servlet.ServletHandler;
 import org.mortbay.jetty.servlet.SessionHandler;
 import org.apache.hadoop.conf.Configuration;
@@ -32,8 +31,6 @@
 
 /**
  * Test cases for protocol-httpclient.
- *
- * @author Susam Pal
  */
 public class TestProtocolHttpClient extends TestCase {
 
Index: src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
===================================================================
--- src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java	(revision 1480500)
+++ src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java	(working copy)
@@ -42,6 +42,8 @@
 import org.apache.commons.httpclient.auth.AuthScope;
 import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
 import org.apache.commons.httpclient.protocol.Protocol;
+import org.apache.commons.httpclient.protocol.ProtocolSocketFactory;
+import org.apache.commons.httpclient.protocol.SSLProtocolSocketFactory;
 
 // Nutch imports
 import org.apache.nutch.crawl.CrawlDatum;
@@ -158,8 +160,8 @@
   private void configureClient() {
 
     // Set up an HTTPS socket factory that accepts self-signed certs.
-    Protocol https = new Protocol("https",
-        new DummySSLProtocolSocketFactory(), 443);
+    ProtocolSocketFactory factory = new SSLProtocolSocketFactory();
+    Protocol https = new Protocol("https", factory, 443);
     Protocol.registerProtocol("https", https);
 
     HttpConnectionManagerParams params = connectionManager.getParams();
@@ -174,7 +176,7 @@
     client.getParams().setConnectionManagerTimeout(timeout);
 
     HostConfiguration hostConf = client.getHostConfiguration();
-    ArrayList headers = new ArrayList();
+    ArrayList<Header> headers = new ArrayList<Header>();
     // Set the User Agent in the header
     headers.add(new Header("User-Agent", userAgent));
     // prefer English
@@ -199,7 +201,7 @@
 
         NTCredentials proxyCredentials = new NTCredentials(
             this.proxyUsername, this.proxyPassword,
-            this.agentHost, this.proxyRealm);
+            Http.agentHost, this.proxyRealm);
 
         client.getState().setProxyCredentials(
             proxyAuthScope, proxyCredentials);
Index: src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java
===================================================================
--- src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java	(revision 1480500)
+++ src/plugin/microformats-reltag/src/java/org/apache/nutch/microformats/reltag/RelTagParser.java	(working copy)
@@ -42,15 +42,12 @@
 
 // Hadoop imports
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
 
-
 /**
  * Adds microformat rel-tags of document if found.
  *
  * @see <a href="http://www.microformats.org/wiki/rel-tag">
  *      http://www.microformats.org/wiki/rel-tag</a>
- * @author J&eacute;r&ocirc;me Charron
  */
 public class RelTagParser implements HtmlParseFilter {
   
@@ -58,10 +55,8 @@
 
   public final static String REL_TAG = "Rel-Tag";
   
-  
   private Configuration conf = null;
   
-  
   /**
    * Scan the HTML document looking at possible rel-tags
    */
@@ -72,25 +67,25 @@
     Parse parse = parseResult.get(content.getUrl());
     // Trying to find the document's rel-tags
     Parser parser = new Parser(doc);
-    Set tags = parser.getRelTags();
-    Iterator iter = tags.iterator();
+    Set<?> tags = parser.getRelTags();
+    Iterator<?> iter = tags.iterator();
     Metadata metadata = parse.getData().getParseMeta();
-    while (iter.hasNext()) {
+    while (iter.hasNext())
       metadata.add(REL_TAG, (String) iter.next());
-    }
+
     return parseResult;
   }
 
   private static class Parser {
 
-    Set tags = null;
+    Set<String> tags = null;
     
     Parser(Node node) {
-      tags = new TreeSet();
+      tags = new TreeSet<String>();
       parse(node);
     }
   
-    Set getRelTags() {
+    Set<String> getRelTags() {
       return tags;
     }
     
@@ -120,9 +115,8 @@
       
       // Recurse
       NodeList children = node.getChildNodes();
-      for (int i=0; children != null && i<children.getLength(); i++) {
+      for (int i=0; children != null && i<children.getLength(); i++)
         parse(children.item(i));
-      }
     }
     
     private final static String parseTag(String url) {
@@ -140,11 +134,6 @@
     
   }
 
-
-  /* ----------------------------- *
-   * <implementation:Configurable> *
-   * ----------------------------- */
-  
   public void setConf(Configuration conf) {
     this.conf = conf;
   }
@@ -152,9 +141,4 @@
   public Configuration getConf() {
     return this.conf;
   }
-  
-  /* ------------------------------ *
-   * </implementation:Configurable> *
-   * ------------------------------ */
-  
 }
Index: src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java
===================================================================
--- src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java	(revision 1480500)
+++ src/plugin/parse-js/src/java/org/apache/nutch/parse/js/JSParseFilter.java	(working copy)
@@ -42,7 +42,6 @@
 import org.apache.nutch.protocol.Content;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
 import org.apache.oro.text.regex.MatchResult;
 import org.apache.oro.text.regex.Pattern;
 import org.apache.oro.text.regex.PatternCompiler;
@@ -60,9 +59,6 @@
  * This class is a heuristic link extractor for JavaScript files and
  * code snippets. The general idea of a two-pass regex matching comes from
  * Heritrix. Parts of the code come from OutlinkExtractor.java
- * by Stephan Strittmatter.
- *
- * @author Andrzej Bialecki &lt;ab@getopt.org&gt;
  */
 public class JSParseFilter implements HtmlParseFilter, Parser {
   public static final Logger LOG = LoggerFactory.getLogger(JSParseFilter.class);
@@ -77,12 +73,12 @@
     Parse parse = parseResult.get(content.getUrl());
 
     String url = content.getBaseUrl();
-    ArrayList outlinks = new ArrayList();
+    ArrayList<Outlink> outlinks = new ArrayList<Outlink>();
     walk(doc, parse, metaTags, url, outlinks);
     if (outlinks.size() > 0) {
       Outlink[] old = parse.getData().getOutlinks();
       String title = parse.getData().getTitle();
-      List list = Arrays.asList(old);
+      List<Outlink> list = Arrays.asList(old);
       outlinks.addAll(list);
       ParseStatus status = parse.getData().getStatus();
       String text = parse.getText();
@@ -97,14 +93,14 @@
     return parseResult;
   }
   
-  private void walk(Node n, Parse parse, HTMLMetaTags metaTags, String base, List outlinks) {
+  private void walk(Node n, Parse parse, HTMLMetaTags metaTags, String base, List<Outlink> outlinks) {
     if (n instanceof Element) {
       String name = n.getNodeName();
       if (name.equalsIgnoreCase("script")) {
-        String lang = null;
+ /*       String lang = null;
         Node lNode = n.getAttributes().getNamedItem("language");
         if (lNode == null) lang = "javascript";
-        else lang = lNode.getNodeValue();
+        else lang = lNode.getNodeValue(); */
         StringBuffer script = new StringBuffer();
         NodeList nn = n.getChildNodes();
         if (nn.getLength() > 0) {
@@ -183,7 +179,7 @@
    */
   private Outlink[] getJSLinks(String plainText, String anchor, String base) {
 
-    final List outlinks = new ArrayList();
+    final List<Outlink> outlinks = new ArrayList<Outlink>();
     URL baseURL = null;
     
     try {
@@ -265,7 +261,10 @@
     BufferedReader br = new BufferedReader(new InputStreamReader(in, "UTF-8"));
     StringBuffer sb = new StringBuffer();
     String line = null;
-    while ((line = br.readLine()) != null) sb.append(line + "\n");
+    while ((line = br.readLine()) != null) 
+      sb.append(line + "\n");
+    br.close();
+    
     JSParseFilter parseFilter = new JSParseFilter();
     parseFilter.setConf(NutchConfiguration.create());
     Outlink[] links = parseFilter.getJSLinks(sb.toString(), "", args[1]);
Index: src/plugin/parse-swf/src/test/org/apache/nutch/parse/swf/TestSWFParser.java
===================================================================
--- src/plugin/parse-swf/src/test/org/apache/nutch/parse/swf/TestSWFParser.java	(revision 1480500)
+++ src/plugin/parse-swf/src/test/org/apache/nutch/parse/swf/TestSWFParser.java	(working copy)
@@ -32,13 +32,12 @@
 import org.apache.nutch.parse.ParseException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.util.NutchConfiguration;
+import org.mortbay.log.Log;
 
 import junit.framework.TestCase;
 
 /** 
  * Unit tests for SWFParser.
- *
- * @author Andrzej Bialecki
  */
 public class TestSWFParser extends TestCase {
 
@@ -48,7 +47,6 @@
   
   private String[] sampleFiles = new String[]{"test1.swf", "test2.swf", "test3.swf"};
   private String[] sampleTexts = new String[]{"test1.txt", "test2.txt", "test3.txt"};
-  private String[] texts = new String[sampleTexts.length];
 
   public TestSWFParser(String name) { 
     super(name);
@@ -94,5 +92,4 @@
       assertTrue(sampleTexts[i].equals(text));
     }
   }
-
 }
Index: src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
===================================================================
--- src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java	(revision 1480500)
+++ src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java	(working copy)
@@ -42,8 +42,6 @@
 /**
  * Parser for Flash SWF files. Loosely based on the sample in JavaSWF
  * distribution.
- * 
- * @author Andrzej Bialecki
  */
 public class SWFParser implements Parser {
   public static final Logger LOG = LoggerFactory.getLogger("org.apache.nutch.parse.swf");
@@ -63,7 +61,7 @@
   public ParseResult getParse(Content content) {
 
     String text = null;
-    Vector outlinks = new Vector();
+    Vector<Outlink> outlinks = new Vector<Outlink>();
 
     try {
 
@@ -120,6 +118,7 @@
 
     byte[] buf = new byte[in.available()];
     in.read(buf);
+    in.close();
     SWFParser parser = new SWFParser();
     ParseResult parseResult = parser.getParse(new Content("file:" + args[0], "file:" + args[0],
                                           buf, "application/x-shockwave-flash",
@@ -153,13 +152,13 @@
    * character codes for the correspnding font glyphs (An empty array denotes a
    * System Font).
    */
-  protected HashMap fontCodes = new HashMap();
+  protected HashMap<Integer, int[]> fontCodes = new HashMap<Integer, int[]>();
 
-  public ArrayList strings = new ArrayList();
+  public ArrayList<String> strings = new ArrayList<String>();
 
-  public HashSet actionStrings = new HashSet();
+  public HashSet<String> actionStrings = new HashSet<String>();
 
-  public ArrayList urls = new ArrayList();
+  public ArrayList<String> urls = new ArrayList<String>();
 
   public ExtractText() {
     super(null);
@@ -167,7 +166,7 @@
 
   public String getText() {
     StringBuffer res = new StringBuffer();
-    Iterator it = strings.iterator();
+    Iterator<String> it = strings.iterator();
     while (it.hasNext()) {
       if (res.length() > 0) res.append(' ');
       res.append(it.next());
@@ -189,7 +188,7 @@
   public String[] getUrls() {
     String[] res = new String[urls.size()];
     int i = 0;
-    Iterator it = urls.iterator();
+    Iterator<String> it = urls.iterator();
     while (it.hasNext()) {
       res[i] = (String) it.next();
       i++;
@@ -350,26 +349,23 @@
  * ActionScript parser. This parser tries to extract free text embedded inside
  * the script, but without polluting it too much with names of variables,
  * methods, etc. Not ideal, but it works.
- * 
- * @author Andrzej Bialecki
  */
 class NutchSWFActions extends SWFActionBlockImpl implements SWFActions {
-  private HashSet strings = null;
+  private HashSet<String> strings = null;
 
-  private ArrayList urls = null;
+  private ArrayList<String> urls = null;
 
   String[] dict = null;
 
-  Stack stack = null;
+  Stack<Object> stack = null;
 
-  public NutchSWFActions(HashSet strings, ArrayList urls) {
+  public NutchSWFActions(HashSet<String> strings, ArrayList<String> urls) {
     this.strings = strings;
     this.urls = urls;
     stack = new SmallStack(100, strings);
   }
 
   public void lookupTable(String[] values) throws IOException {
-    // System.out.println("-lookupTable: " + values.length);
     for (int i = 0; i < values.length; i++) {
       if (!strings.contains(values[i])) strings.add(values[i]);
     }
@@ -378,7 +374,6 @@
   }
 
   public void defineLocal() throws IOException {
-    // System.out.println("-defineLocal");
     stack.pop();
     super.defineLocal();
   }
@@ -398,69 +393,58 @@
   }
 
   public SWFActionBlock.TryCatchFinally _try(String var) throws IOException {
-    // System.out.println("_try: var=" + var);
     // stack.push(var);
     strings.remove(var);
     return super._try(var);
   }
 
   public void comment(String var) throws IOException {
-    // System.out.println("-comment: var=" + var);
     // stack.push(var);
     strings.remove(var);
     super.comment(var);
   }
 
   public void goToFrame(String var) throws IOException {
-    // System.out.println("-goToFrame: var=" + var);
     stack.push(var);
     strings.remove(var);
     super.gotoFrame(var);
   }
 
   public void ifJump(String var) throws IOException {
-    // System.out.println("-ifJump: var=" + var);
     strings.remove(var);
     super.ifJump(var);
   }
 
   public void jump(String var) throws IOException {
-    // System.out.println("-jump: var=" + var);
     strings.remove(var);
     super.jump(var);
   }
 
   public void jumpLabel(String var) throws IOException {
-    // System.out.println("-jumpLabel: var=" + var);
     strings.remove(var);
     super.jumpLabel(var);
   }
 
   public void lookup(int var) throws IOException {
-    // System.out.println("-lookup: var=" + var);
     if (dict != null && var >= 0 && var < dict.length) {
-      // System.out.println(" push " + dict[var]);
       stack.push(dict[var]);
     }
     super.lookup(var);
   }
 
   public void push(String var) throws IOException {
-    // System.out.println("-push: var=" + var);
     stack.push(var);
     strings.remove(var);
     super.push(var);
   }
 
   public void setTarget(String var) throws IOException {
-    // System.out.println("-setTarget: var=" + var);
     stack.push(var);
     strings.remove(var);
     super.setTarget(var);
   }
 
   public SWFActionBlock startFunction(String var, String[] params) throws IOException {
-    // System.out.println("-startFunction1: var=" + var);
     stack.push(var);
     strings.remove(var);
     if (params != null) {
@@ -472,7 +456,6 @@
   }
 
   public SWFActionBlock startFunction2(String var, int arg1, int arg2, String[] params, int[] arg3) throws IOException {
-    // System.out.println("-startFunction2: var=" + var);
     stack.push(var);
     strings.remove(var);
     if (params != null) {
@@ -484,74 +467,61 @@
   }
 
   public void waitForFrame(int num, String var) throws IOException {
-    // System.out.println("-waitForFrame: var=" + var);
     stack.push(var);
     strings.remove(var);
     super.waitForFrame(num, var);
   }
 
   public void waitForFrame(String var) throws IOException {
-    // System.out.println("-waitForFrame: var=" + var);
     stack.push(var);
     strings.remove(var);
     super.waitForFrame(var);
   }
 
   public void done() throws IOException {
-    // System.out.println("-done");
     while (stack.size() > 0) {
       strings.remove(stack.pop());
     }
   }
 
   public SWFActionBlock start(int arg0, int arg1) throws IOException {
-    // System.out.println("-start");
     return this;
   }
 
   public SWFActionBlock start(int arg0) throws IOException {
-    // System.out.println("-start");
     return this;
   }
 
   public void add() throws IOException {
-    // System.out.println("-add");
     super.add();
   }
 
   public void asciiToChar() throws IOException {
-    // System.out.println("-asciitochar");
     super.asciiToChar();
   }
 
   public void asciiToCharMB() throws IOException {
-    // System.out.println("-asciitocharMB");
     super.asciiToCharMB();
   }
 
   public void push(int var) throws IOException {
-    // System.out.println("-push(int)");
     if (dict != null && var >= 0 && var < dict.length) {
-      // System.out.println(" push " + dict[var]);
       stack.push(dict[var]);
     }
     super.push(var);
   }
 
   public void callFunction() throws IOException {
-    // System.out.println("-callFunction");
     strings.remove(stack.pop());
     super.callFunction();
   }
 
   public void callMethod() throws IOException {
-    // System.out.println("-callMethod");
     strings.remove(stack.pop());
     super.callMethod();
   }
 
   public void getMember() throws IOException {
-    // System.out.println("-getMember");
     // 0: name
     String val = (String) stack.pop();
     strings.remove(val);
@@ -560,116 +530,97 @@
 
   public void setMember() throws IOException {
     // 0: value -1: name
-    String val = (String) stack.pop();
+    stack.pop(); // value
     String name = (String) stack.pop();
-    // System.out.println("-setMember: name=" + name + ", val=" + val);
     strings.remove(name);
     super.setMember();
   }
 
   public void setProperty() throws IOException {
-    // System.out.println("-setProperty");
     super.setProperty();
   }
 
   public void setVariable() throws IOException {
-    // System.out.println("-setVariable");
     super.setVariable();
   }
 
   public void call() throws IOException {
-    // System.out.println("-call");
     strings.remove(stack.pop());
     super.call();
   }
 
   public void setTarget() throws IOException {
-    // System.out.println("-setTarget");
     strings.remove(stack.pop());
     super.setTarget();
   }
 
   public void pop() throws IOException {
-    // System.out.println("-pop");
     strings.remove(stack.pop());
     super.pop();
   }
 
   public void push(boolean arg0) throws IOException {
-    // System.out.println("-push(b)");
     stack.push("" + arg0);
     super.push(arg0);
   }
 
   public void push(double arg0) throws IOException {
-    // System.out.println("-push(d)");
     stack.push("" + arg0);
     super.push(arg0);
   }
 
   public void push(float arg0) throws IOException {
-    // System.out.println("-push(f)");
     stack.push("" + arg0);
     super.push(arg0);
   }
 
   public void pushNull() throws IOException {
-    // System.out.println("-push(null)");
     stack.push("");
     super.pushNull();
   }
 
   public void pushRegister(int arg0) throws IOException {
-    // System.out.println("-push(reg)");
     stack.push("" + arg0);
     super.pushRegister(arg0);
   }
 
   public void pushUndefined() throws IOException {
-    // System.out.println("-push(undef)");
     stack.push("???");
     super.pushUndefined();
   }
 
   public void getProperty() throws IOException {
-    // System.out.println("-getProperty");
     stack.pop();
     super.getProperty();
   }
 
   public void getVariable() throws IOException {
-    // System.out.println("-getVariable");
     strings.remove(stack.pop());
     super.getVariable();
   }
 
   public void gotoFrame(boolean arg0) throws IOException {
-    // System.out.println("-gotoFrame(b)");
     stack.push("" + arg0);
     super.gotoFrame(arg0);
   }
 
   public void gotoFrame(int arg0) throws IOException {
-    // System.out.println("-gotoFrame(int)");
     stack.push("" + arg0);
     super.gotoFrame(arg0);
   }
 
   public void gotoFrame(String arg0) throws IOException {
-    // System.out.println("-gotoFrame(string)");
     stack.push("" + arg0);
     strings.remove(arg0);
     super.gotoFrame(arg0);
   }
 
   public void newObject() throws IOException {
-    // System.out.println("-newObject");
     stack.pop();
     super.newObject();
   }
 
   public SWFActionBlock startWith() throws IOException {
-    // System.out.println("-startWith");
     return this;
   }
 
@@ -678,13 +629,15 @@
 /*
  * Small bottom-less stack.
  */
-class SmallStack extends Stack {
+class SmallStack extends Stack<Object> {
 
+  private static final long serialVersionUID = 1L;
+
   private int maxSize;
 
-  private HashSet strings = null;
+  private HashSet<String> strings = null;
 
-  public SmallStack(int maxSize, HashSet strings) {
+  public SmallStack(int maxSize, HashSet<String> strings) {
     this.maxSize = maxSize;
     this.strings = strings;
   }
Index: src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java
===================================================================
--- src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java	(revision 1480500)
+++ src/plugin/urlfilter-suffix/src/java/org/apache/nutch/urlfilter/suffix/SuffixURLFilter.java	(working copy)
@@ -181,7 +181,7 @@
       return;
     }
     BufferedReader in = new BufferedReader(reader);
-    List aSuffixes = new ArrayList();
+    List<String> aSuffixes = new ArrayList<String>();
     boolean allow = false;
     boolean ignore = false;
     String line;
Index: src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java
===================================================================
--- src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java	(revision 1480500)
+++ src/plugin/parse-zip/src/java/org/apache/nutch/parse/zip/ZipParser.java	(working copy)
@@ -18,15 +18,12 @@
 package org.apache.nutch.parse.zip;
 
 import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.util.Properties;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.net.protocols.Response;
 import org.apache.nutch.parse.Outlink;
 import org.apache.nutch.parse.ParseData;
@@ -40,8 +37,6 @@
 /**
  * ZipParser class based on MSPowerPointParser class by Stephan Strittmatter.
  * Nutch parse plugin for zip files - Content Type : application/zip
- * 
- * @author Rohit Kulkarni & Ashish Vaidya
  */
 public class ZipParser implements Parser {
 
@@ -57,17 +52,13 @@
     String resultText = null;
     String resultTitle = null;
     Outlink[] outlinks = null;
-    List outLinksList = new ArrayList();
-    Properties properties = null;
+    List<Outlink> outLinksList = new ArrayList<Outlink>();
 
     try {
       final String contentLen = content.getMetadata().get(Response.CONTENT_LENGTH);
       final int len = Integer.parseInt(contentLen);
       if (LOG.isDebugEnabled()) { LOG.debug("ziplen: " + len); }
       final byte[] contentInBytes = content.getContent();
-      final ByteArrayInputStream bainput = new ByteArrayInputStream(
-          contentInBytes);
-      final InputStream input = bainput;
 
       if (contentLen != null && contentInBytes.length != len) {
         return new ParseStatus(ParseStatus.FAILED,
Index: src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
===================================================================
--- src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java	(revision 1480519)
+++ src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java	(working copy)
@@ -59,7 +59,8 @@
 	private HtmlParseFilters htmlParseFilters;
 	private String cachingPolicy;
 
-	public ParseResult getParse(Content content) {
+	@SuppressWarnings("deprecation")
+  public ParseResult getParse(Content content) {
 		String mimeType = content.getContentType();
 
 		URL base;
Index: src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/DOMBuilder.java
===================================================================
--- src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/DOMBuilder.java	(revision 1480500)
+++ src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/DOMBuilder.java	(working copy)
@@ -58,7 +58,7 @@
   public DocumentFragment m_docFrag = null;
 
   /** Vector of element nodes          */
-  protected Stack m_elemStack = new Stack();
+  protected Stack<Element> m_elemStack = new Stack<Element>();
 
   /**
    * DOMBuilder instance constructor... it will add the DOM nodes
Index: src/test/org/apache/nutch/plugin/TestPluginSystem.java
===================================================================
--- src/test/org/apache/nutch/plugin/TestPluginSystem.java	(revision 1480500)
+++ src/test/org/apache/nutch/plugin/TestPluginSystem.java	(working copy)
@@ -35,13 +35,11 @@
 
 /**
  * Unit tests for the plugin system
- * 
- * @author joa23
  */
 public class TestPluginSystem extends TestCase {
     private int fPluginCount;
 
-    private LinkedList fFolders = new LinkedList();
+    private LinkedList<File> fFolders = new LinkedList<File>();
     private Configuration conf ;
     private PluginRepository repository;
 
@@ -62,11 +60,10 @@
      */
     protected void tearDown() throws Exception {
         for (int i = 0; i < fFolders.size(); i++) {
-            File folder = (File) fFolders.get(i);
+            File folder = fFolders.get(i);
             delete(folder);
             folder.delete();
         }
-
     }
 
     /**
Index: src/test/org/apache/nutch/segment/TestSegmentMerger.java
===================================================================
--- src/test/org/apache/nutch/segment/TestSegmentMerger.java	(revision 1480500)
+++ src/test/org/apache/nutch/segment/TestSegmentMerger.java	(working copy)
@@ -42,11 +42,11 @@
   public void setUp() throws Exception {
     conf = NutchConfiguration.create();
     fs = FileSystem.get(conf);
-    long blkSize = fs.getDefaultBlockSize();
     testDir = new Path(conf.get("hadoop.tmp.dir"), "merge-" + System.currentTimeMillis());
     seg1 = new Path(testDir, "seg1");
     seg2 = new Path(testDir, "seg2");
     out = new Path(testDir, "out");
+
     // create large parse-text segments
     System.err.println("Creating large segment 1...");
     DecimalFormat df = new DecimalFormat("0000000");
@@ -55,6 +55,9 @@
     MapFile.Writer w = new MapFile.Writer(conf, fs, ptPath.toString(), Text.class, ParseText.class);
     long curSize = 0;
     countSeg1 = 0;
+    FileStatus fileStatus = fs.getFileStatus(ptPath);
+    long blkSize = fileStatus.getBlockSize();
+    
     while (curSize < blkSize * 2) {
       k.set("seg1-" + df.format(countSeg1));
       w.append(k, new ParseText("seg1 text " + countSeg1));
Index: src/test/org/apache/nutch/crawl/TestLinkDbMerger.java
===================================================================
--- src/test/org/apache/nutch/crawl/TestLinkDbMerger.java	(revision 1480500)
+++ src/test/org/apache/nutch/crawl/TestLinkDbMerger.java	(working copy)
@@ -68,9 +68,9 @@
   String[] urls20_expected = urls11_expected;
   String[] urls21_expected = urls21;
   
-  TreeMap init1 = new TreeMap();
-  TreeMap init2 = new TreeMap();
-  HashMap expected = new HashMap();
+  TreeMap<String, String[]> init1 = new TreeMap<String, String[]>();
+  TreeMap<String, String[]> init2 = new TreeMap<String, String[]>();
+  HashMap<String, String[]> expected = new HashMap<String, String[]>();
   Configuration conf;
   Path testDir;
   FileSystem fs;
@@ -116,16 +116,16 @@
     merger.merge(output, new Path[]{linkdb1, linkdb2}, false, false);
     LOG.fine("* reading linkdb: " + output);
     reader = new LinkDbReader(conf, output);
-    Iterator it = expected.keySet().iterator();
+    Iterator<String> it = expected.keySet().iterator();
     while (it.hasNext()) {
-      String url = (String)it.next();
+      String url = it.next();
       LOG.fine("url=" + url);
-      String[] vals = (String[])expected.get(url);
+      String[] vals = expected.get(url);
       Inlinks inlinks = reader.getInlinks(new Text(url));
       // may not be null
       assertNotNull(inlinks);
-      ArrayList links = new ArrayList();
-      Iterator it2 = inlinks.iterator();
+      ArrayList<String> links = new ArrayList<String>();
+      Iterator<?> it2 = inlinks.iterator();
       while (it2.hasNext()) {
         Inlink in = (Inlink)it2.next();
         links.add(in.getFromUrl());
@@ -139,15 +139,15 @@
     fs.delete(testDir, true);
   }
   
-  private void createLinkDb(Configuration config, FileSystem fs, Path linkdb, TreeMap init) throws Exception {
+  private void createLinkDb(Configuration config, FileSystem fs, Path linkdb, TreeMap<String, String[]> init) throws Exception {
     LOG.fine("* creating linkdb: " + linkdb);
     Path dir = new Path(linkdb, LinkDb.CURRENT_NAME);
     MapFile.Writer writer = new MapFile.Writer(config, fs, new Path(dir, "part-00000").toString(), Text.class, Inlinks.class);
-    Iterator it = init.keySet().iterator();
+    Iterator<String> it = init.keySet().iterator();
     while (it.hasNext()) {
-      String key = (String)it.next();
+      String key = it.next();
       Inlinks inlinks = new Inlinks();
-      String[] vals = (String[])init.get(key);
+      String[] vals = init.get(key);
       for (int i = 0; i < vals.length; i++) {
         Inlink in = new Inlink(vals[i], vals[i]);
         inlinks.add(in);
Index: src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
===================================================================
--- src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java	(revision 1480500)
+++ src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java	(working copy)
@@ -44,9 +44,9 @@
           url21
   };
   
-  TreeSet init1 = new TreeSet();
-  TreeSet init2 = new TreeSet();
-  HashMap expected = new HashMap();
+  TreeSet<String> init1 = new TreeSet<String>();
+  TreeSet<String> init2 = new TreeSet<String>();
+  HashMap<String, CrawlDatum> expected = new HashMap<String, CrawlDatum>();
   CrawlDatum cd1, cd2, cd3;
   Configuration conf;
   FileSystem fs;
@@ -83,6 +83,7 @@
     fs.mkdirs(testDir);
   }
   
+  @SuppressWarnings("deprecation")
   public void tearDown() {
     try {
       if (fs.exists(testDir))
@@ -93,6 +94,7 @@
     } catch (Exception e) { }
   }
 
+  @SuppressWarnings("deprecation")
   public void testMerge() throws Exception {
     Path crawldb1 = new Path(testDir, "crawldb1");
     Path crawldb2 = new Path(testDir, "crawldb2");
@@ -105,11 +107,11 @@
     LOG.fine("* reading crawldb: " + output);
     reader = new CrawlDbReader();
     String crawlDb = output.toString();
-    Iterator it = expected.keySet().iterator();
+    Iterator<String> it = expected.keySet().iterator();
     while (it.hasNext()) {
-      String url = (String)it.next();
+      String url = it.next();
       LOG.fine("url=" + url);
-      CrawlDatum cd = (CrawlDatum)expected.get(url);
+      CrawlDatum cd = expected.get(url);
       CrawlDatum res = reader.get(crawlDb, url, conf);
       LOG.fine(" -> " + res);
       System.out.println("url=" + url);
@@ -123,13 +125,13 @@
     fs.delete(testDir);
   }
   
-  private void createCrawlDb(Configuration config, FileSystem fs, Path crawldb, TreeSet init, CrawlDatum cd) throws Exception {
+  private void createCrawlDb(Configuration config, FileSystem fs, Path crawldb, TreeSet<String> init, CrawlDatum cd) throws Exception {
     LOG.fine("* creating crawldb: " + crawldb);
     Path dir = new Path(crawldb, CrawlDb.CURRENT_NAME);
     MapFile.Writer writer = new MapFile.Writer(config, fs, new Path(dir, "part-00000").toString(), Text.class, CrawlDatum.class);
-    Iterator it = init.iterator();
+    Iterator<String> it = init.iterator();
     while (it.hasNext()) {
-      String key = (String)it.next();
+      String key = it.next();
       writer.append(new Text(key), cd);
     }
     writer.close();
Index: src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/NodeDumper.java	(working copy)
@@ -343,36 +343,57 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphDbOpts = OptionBuilder.withArgName("webgraphdb").hasArg().withDescription(
-      "the web graph database to use").create("webgraphdb");
-    Option inlinkOpts = OptionBuilder.withArgName("inlinks").withDescription(
-      "show highest inlinks").create("inlinks");
-    Option outlinkOpts = OptionBuilder.withArgName("outlinks").withDescription(
-      "show highest outlinks").create("outlinks");
-    Option scoreOpts = OptionBuilder.withArgName("scores").withDescription(
-      "show highest scores").create("scores");
-    Option topNOpts = OptionBuilder.withArgName("topn").hasOptionalArg().withDescription(
-      "show topN scores").create("topn");
-    Option outputOpts = OptionBuilder.withArgName("output").hasArg().withDescription(
-      "the output directory to use").create("output");
-    Option effOpts = OptionBuilder.withArgName("asEff").withDescription(
-      "Solr ExternalFileField compatible output format").create("asEff");
-    Option groupOpts = OptionBuilder.hasArgs(2).withDescription(
-      "group <host|domain> <sum|max>").create("group");
-    Option sequenceFileOpts = OptionBuilder.withArgName("asSequenceFile").withDescription(
-      "whether to output as a sequencefile").create("asSequenceFile");
-
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the web graph database to use");
+    Option webGraphDbOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphDbOpts);
+    
+    OptionBuilder.withArgName("inlinks");
+    OptionBuilder.withDescription("show highest inlinks");
+    Option inlinkOpts = OptionBuilder.create("inlinks");
     options.addOption(inlinkOpts);
+    
+    OptionBuilder.withArgName("outlinks");
+    OptionBuilder.withDescription("show highest outlinks");
+    Option outlinkOpts = OptionBuilder.create("outlinks");
     options.addOption(outlinkOpts);
+    
+    OptionBuilder.withArgName("scores");
+    OptionBuilder.withDescription("show highest scores");
+    Option scoreOpts = OptionBuilder.create("scores");
     options.addOption(scoreOpts);
+    
+    OptionBuilder.withArgName("topn");
+    OptionBuilder.hasOptionalArg();
+    OptionBuilder.withDescription("show topN scores");
+    Option topNOpts = OptionBuilder.create("topn");
     options.addOption(topNOpts);
+    
+    OptionBuilder.withArgName("output");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the output directory to use");
+    Option outputOpts = OptionBuilder.create("output");
     options.addOption(outputOpts);
+    
+    OptionBuilder.withArgName("asEff");
+    OptionBuilder.withDescription("Solr ExternalFileField compatible output format");
+    Option effOpts = OptionBuilder.create("asEff");
     options.addOption(effOpts);
+    
+    OptionBuilder.hasArgs(2);
+    OptionBuilder.withDescription("group <host|domain> <sum|max>");
+    Option groupOpts = OptionBuilder.create("group");
     options.addOption(groupOpts);
+    
+    OptionBuilder.withArgName("asSequenceFile");
+    OptionBuilder.withDescription("whether to output as a sequencefile");
+    Option sequenceFileOpts = OptionBuilder.create("asSequenceFile");
     options.addOption(sequenceFileOpts);
 
     CommandLineParser parser = new GnuParser();
@@ -388,7 +409,6 @@
       String webGraphDb = line.getOptionValue("webgraphdb");
       boolean inlinks = line.hasOption("inlinks");
       boolean outlinks = line.hasOption("outlinks");
-      boolean scores = line.hasOption("scores");
 
       long topN = (line.hasOption("topn")
         ? Long.parseLong(line.getOptionValue("topn")) : Long.MAX_VALUE);
Index: src/java/org/apache/nutch/scoring/webgraph/NodeReader.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/NodeReader.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/NodeReader.java	(working copy)
@@ -90,14 +90,21 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphOpts = OptionBuilder.withArgName("webgraphdb").hasArg()
-      .withDescription("the webgraphdb to use").create("webgraphdb");
-    Option urlOpts = OptionBuilder.withArgName("url").hasOptionalArg()
-      .withDescription("the url to dump").create("url");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the webgraphdb to use");
+    Option webGraphOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphOpts);
+    
+    OptionBuilder.withArgName("url");
+    OptionBuilder.hasOptionalArg();
+    OptionBuilder.withDescription("the url to dump");
+    Option urlOpts = OptionBuilder.create("url");
     options.addOption(urlOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/scoring/webgraph/LinkRank.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(working copy)
@@ -287,12 +287,10 @@
     implements Mapper<Text, Node, Text, LongWritable>,
     Reducer<Text, LongWritable, Text, LongWritable> {
 
-    private JobConf conf;
     private static Text numNodes = new Text(NUM_NODES);
     private static LongWritable one = new LongWritable(1L);
 
     public void configure(JobConf conf) {
-      this.conf = conf;
     }
 
     /**
@@ -678,11 +676,15 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webgraphOpts = OptionBuilder.withArgName("webgraphdb").hasArg().withDescription(
-      "the web graph db to use").create("webgraphdb");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the web graph db to use");
+    Option webgraphOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webgraphOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(working copy)
@@ -433,12 +433,17 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphDbOpts = OptionBuilder.withArgName("webgraphdb").hasArg()
-      .withDescription("the web graph database to use").create("webgraphdb");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the web graph database to use");
+    Option webGraphDbOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphDbOpts);
+    
     CommandLineParser parser = new GnuParser();
     try {
 
Index: src/java/org/apache/nutch/scoring/webgraph/Loops.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/Loops.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/Loops.java	(working copy)
@@ -583,11 +583,15 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphDbOpts = OptionBuilder.withArgName("webgraphdb").hasArg().withDescription(
-      "the web graph database to use").create("webgraphdb");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the web graph database to use");
+    Option webGraphDbOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphDbOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/scoring/webgraph/LoopReader.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LoopReader.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/LoopReader.java	(working copy)
@@ -45,9 +45,7 @@
   private FileSystem fs;
   private MapFile.Reader[] loopReaders;
   
-  public LoopReader() {
-    
-  }
+  public LoopReader() { }
   
   public LoopReader(Configuration conf) {
     super(conf);
@@ -94,14 +92,21 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphOpts = OptionBuilder.withArgName("webgraphdb").hasArg()
-      .withDescription("the webgraphdb to use").create("webgraphdb");
-    Option urlOpts = OptionBuilder.withArgName("url").hasOptionalArg()
-      .withDescription("the url to dump").create("url");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the webgraphdb to use");
+    Option webGraphOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphOpts);
+    
+    OptionBuilder.withArgName("url");
+    OptionBuilder.hasOptionalArg();
+    OptionBuilder.withDescription("the url to dump");
+    Option urlOpts = OptionBuilder.create("url");
     options.addOption(urlOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/scoring/webgraph/WebGraph.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(working copy)
@@ -405,28 +405,13 @@
     extends Configured
     implements Mapper<Text, LinkDatum, Text, LinkDatum> {
 
-    private JobConf conf;
     private long timestamp;
 
     /**
-     * Default constructor.
-     */
-    public InlinkDb() {
-    }
-
-    /**
-     * Configurable constructor.
-     */
-    public InlinkDb(Configuration conf) {
-      setConf(conf);
-    }
-
-    /**
      * Configures job. Sets timestamp for all Inlink LinkDatum objects to the
      * current system time.
      */
     public void configure(JobConf conf) {
-      this.conf = conf;
       timestamp = System.currentTimeMillis();
     }
 
@@ -461,30 +446,12 @@
     extends Configured
     implements Reducer<Text, LinkDatum, Text, Node> {
 
-    private JobConf conf;
-
     /**
-     * Default constructor.
-     */
-    public NodeDb() {
-    }
-
-    /**
-     * Configurable constructor.
-     */
-    public NodeDb(Configuration conf) {
-      setConf(conf);
-    }
-
-    /**
      * Configures job.
      */
-    public void configure(JobConf conf) {
-      this.conf = conf;
-    }
+    public void configure(JobConf conf) { }
 
-    public void close() {
-    }
+    public void close() { }
 
     /**
      * Counts the number of inlinks and outlinks for each url and sets a default
@@ -731,23 +698,37 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option webGraphDbOpts = OptionBuilder.withArgName("webgraphdb").hasArg().withDescription(
-      "the web graph database to use").create("webgraphdb");
-    Option segOpts = OptionBuilder.withArgName("segment").hasArgs().withDescription(
-      "the segment(s) to use").create("segment");
-    Option segDirOpts = OptionBuilder.withArgName("segmentDir").hasArgs().withDescription(
-      "the segment directory to use").create("segmentDir");
-    Option normalizeOpts = OptionBuilder.withArgName("normalize").withDescription(
-      "whether to use URLNormalizers on the URL's in the segment").create("normalize");
-    Option filterOpts = OptionBuilder.withArgName("filter").withDescription(
-      "whether to use URLFilters on the URL's in the segment").create("filter");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the web graph database to use");
+    Option webGraphDbOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphDbOpts);
+    
+    OptionBuilder.withArgName("segment");
+    OptionBuilder.hasArgs();
+    OptionBuilder.withDescription("the segment(s) to use");
+    Option segOpts = OptionBuilder.create("segment");
     options.addOption(segOpts);
+    
+    OptionBuilder.withArgName("segmentDir");
+    OptionBuilder.hasArgs();
+    OptionBuilder.withDescription("the segment directory to use");
+    Option segDirOpts = OptionBuilder.create("segmentDir");
     options.addOption(segDirOpts);
+    
+    OptionBuilder.withArgName("normalize");
+    OptionBuilder.withDescription("whether to use URLNormalizers on the URL's in the segment");
+    Option normalizeOpts = OptionBuilder.create("normalize");
     options.addOption(normalizeOpts);
+    
+    OptionBuilder.withArgName("filter");
+    OptionBuilder.withDescription("whether to use URLFilters on the URL's in the segment");
+    Option filterOpts = OptionBuilder.create("filter");
     options.addOption(filterOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java	(revision 1480500)
+++ src/java/org/apache/nutch/scoring/webgraph/ScoreUpdater.java	(working copy)
@@ -217,14 +217,21 @@
     throws Exception {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option crawlDbOpts = OptionBuilder.withArgName("crawldb").hasArg().withDescription(
-      "the crawldb to use").create("crawldb");
-    Option webGraphOpts = OptionBuilder.withArgName("webgraphdb").hasArg().withDescription(
-      "the webgraphdb to use").create("webgraphdb");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+    
+    OptionBuilder.withArgName("crawldb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the crawldb to use");
+    Option crawlDbOpts = OptionBuilder.create("crawldb");
     options.addOption(crawlDbOpts);
+    
+    OptionBuilder.withArgName("webgraphdb");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the webgraphdb to use");
+    Option webGraphOpts = OptionBuilder.create("webgraphdb");
     options.addOption(webGraphOpts);
 
     CommandLineParser parser = new GnuParser();
Index: src/java/org/apache/nutch/crawl/Generator.java
===================================================================
--- src/java/org/apache/nutch/crawl/Generator.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/Generator.java	(working copy)
@@ -368,6 +368,7 @@
       super(Text.class);
     }
 
+    @SuppressWarnings("rawtypes")
     public int compare(WritableComparable a, WritableComparable b) {
       Text url1 = (Text) a;
       Text url2 = (Text) b;
Index: src/java/org/apache/nutch/crawl/SignatureComparator.java
===================================================================
--- src/java/org/apache/nutch/crawl/SignatureComparator.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/SignatureComparator.java	(working copy)
@@ -19,7 +19,7 @@
 
 import java.util.Comparator;
 
-public class SignatureComparator implements Comparator {
+public class SignatureComparator implements Comparator<Object> {
   public int compare(Object o1, Object o2) {
     return _compare(o1, o2);
   }
Index: src/java/org/apache/nutch/crawl/SignatureFactory.java
===================================================================
--- src/java/org/apache/nutch/crawl/SignatureFactory.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/SignatureFactory.java	(working copy)
@@ -47,7 +47,7 @@
         if (LOG.isInfoEnabled()) {
           LOG.info("Using Signature impl: " + clazz);
         }
-        Class implClass = Class.forName(clazz);
+        Class<?> implClass = Class.forName(clazz);
         impl = (Signature)implClass.newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
Index: src/java/org/apache/nutch/crawl/MapWritable.java
===================================================================
--- src/java/org/apache/nutch/crawl/MapWritable.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/MapWritable.java	(working copy)
@@ -79,9 +79,9 @@
 
   private ClassIdEntry fIdFirst;
 
-  private static Map<Class, Byte> CLASS_ID_MAP = new HashMap<Class, Byte>();
+  private static Map<Class<?>, Byte> CLASS_ID_MAP = new HashMap<Class<?>, Byte>();
 
-  private static Map<Byte, Class> ID_CLASS_MAP = new HashMap<Byte, Class>();
+  private static Map<Byte, Class<?>> ID_CLASS_MAP = new HashMap<Byte, Class<?>>();
 
   static {
 
@@ -101,7 +101,7 @@
 
   }
 
-  private static void addToMap(Class clazz, Byte byteId) {
+  private static void addToMap(Class<?> clazz, Byte byteId) {
     CLASS_ID_MAP.put(clazz, byteId);
     ID_CLASS_MAP.put(byteId, clazz);
   }
@@ -338,7 +338,7 @@
       // read class-id map
       fIdCount = in.readByte();
       byte id;
-      Class clazz;
+      Class<?> clazz;
       for (int i = 0; i < fIdCount; i++) {
         try {
           id = in.readByte();
@@ -393,7 +393,7 @@
     }
   }
 
-  private byte addIdEntry(byte id, Class clazz) {
+  private byte addIdEntry(byte id, Class<?> clazz) {
     if (fIdFirst == null) {
       fIdFirst = fIdLast = new ClassIdEntry(id, clazz);
     } else {
@@ -402,7 +402,7 @@
     return id;
   }
 
-  private byte getClassId(Class clazz) {
+  private byte getClassId(Class<?> clazz) {
     Byte classId = CLASS_ID_MAP.get(clazz);
     if (classId != null) {
       return classId.byteValue();
@@ -438,8 +438,8 @@
       last = entry;
       entry = entry.fNextEntry;
     }
-    Class keyClass = getClass(keyId);
-    Class valueClass = getClass(valueId);
+    Class<?> keyClass = getClass(keyId);
+    Class<?> valueClass = getClass(valueId);
     try {
       return new KeyValueEntry((Writable) keyClass.newInstance(),
           (Writable) valueClass.newInstance());
@@ -449,8 +449,8 @@
 
   }
 
-  private Class getClass(final byte id) throws IOException {
-    Class clazz = ID_CLASS_MAP.get(new Byte(id));
+  private Class<?> getClass(final byte id) throws IOException {
+    Class<?> clazz = ID_CLASS_MAP.get(new Byte(id));
     if (clazz == null) {
       ClassIdEntry entry = fIdFirst;
       while (entry != null) {
@@ -502,14 +502,14 @@
 
   /** container for Id class tuples */
   private class ClassIdEntry {
-    public ClassIdEntry(byte id, Class clazz) {
+    public ClassIdEntry(byte id, Class<?> clazz) {
       fId = id;
       fclazz = clazz;
     }
 
     private byte fId;
 
-    private Class fclazz;
+    private Class<?> fclazz;
 
     private ClassIdEntry fNextIdEntry;
   }
Index: src/java/org/apache/nutch/crawl/NutchWritable.java
===================================================================
--- src/java/org/apache/nutch/crawl/NutchWritable.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/NutchWritable.java	(working copy)
@@ -19,12 +19,13 @@
 import org.apache.hadoop.io.Writable;
 import org.apache.nutch.util.GenericWritableConfigurable;
 
+@SuppressWarnings("unchecked")
 public class NutchWritable extends GenericWritableConfigurable {
 
   private static Class<? extends Writable>[] CLASSES = null;
 
   static {
-    CLASSES = new Class[] {
+    CLASSES = (Class<? extends Writable>[]) new Class<?>[] {
       org.apache.hadoop.io.NullWritable.class,
       org.apache.hadoop.io.BooleanWritable.class,
       org.apache.hadoop.io.LongWritable.class,
Index: src/java/org/apache/nutch/crawl/Injector.java
===================================================================
--- src/java/org/apache/nutch/crawl/Injector.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/Injector.java	(working copy)
@@ -60,7 +60,7 @@
   public static String nutchFixedFetchIntervalMDName = "nutch.fetchInterval.fixed";
 
   /** Normalize and filter injected urls. */
-  public static class InjectMapper implements Mapper<WritableComparable, Text, Text, CrawlDatum> {
+  public static class InjectMapper implements Mapper<WritableComparable<?>, Text, Text, CrawlDatum> {
     private URLNormalizers urlNormalizers;
     private int interval;
     private float scoreInjected;
@@ -81,7 +81,7 @@
 
     public void close() {}
 
-    public void map(WritableComparable key, Text value,
+    public void map(WritableComparable<?> key, Text value,
                     OutputCollector<Text, CrawlDatum> output, Reporter reporter)
       throws IOException {
       String url = value.toString();              // value is line of text
Index: src/java/org/apache/nutch/crawl/LinkDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/LinkDbReader.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/LinkDbReader.java	(working copy)
@@ -43,7 +43,7 @@
 public class LinkDbReader extends Configured implements Tool, Closeable {
   public static final Logger LOG = LoggerFactory.getLogger(LinkDbReader.class);
 
-  private static final Partitioner<WritableComparable, Writable> PARTITIONER = new HashPartitioner<WritableComparable, Writable>();
+  private static final Partitioner<WritableComparable<?>, Writable> PARTITIONER = new HashPartitioner<WritableComparable<?>, Writable>();
 
   private FileSystem fs;
   private Path directory;
Index: src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
===================================================================
--- src/java/org/apache/nutch/crawl/FetchScheduleFactory.java	(revision 1480500)
+++ src/java/org/apache/nutch/crawl/FetchScheduleFactory.java	(working copy)
@@ -37,7 +37,7 @@
     if (impl == null) {
       try {
         LOG.info("Using FetchSchedule impl: " + clazz);
-        Class implClass = Class.forName(clazz);
+        Class<?> implClass = Class.forName(clazz);
         impl = (FetchSchedule)implClass.newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
Index: src/java/org/apache/nutch/parse/ParserFactory.java
===================================================================
--- src/java/org/apache/nutch/parse/ParserFactory.java	(revision 1480500)
+++ src/java/org/apache/nutch/parse/ParserFactory.java	(working copy)
@@ -48,7 +48,7 @@
   public static final String DEFAULT_PLUGIN = "*";
   
   /** Empty extension list for caching purposes. */
-  private final List EMPTY_EXTENSION_LIST = Collections.EMPTY_LIST;
+  private final List<Extension> EMPTY_EXTENSION_LIST = Collections.<Extension>emptyList();
   
   private Configuration conf;
   private ExtensionPoint extensionPoint;
@@ -57,9 +57,9 @@
   public ParserFactory(Configuration conf) {
     this.conf = conf;
     ObjectCache objectCache = ObjectCache.get(conf);
-    this.extensionPoint = PluginRepository.get(conf).getExtensionPoint(
-        Parser.X_POINT_ID);
+    this.extensionPoint = PluginRepository.get(conf).getExtensionPoint(Parser.X_POINT_ID);
     this.parsePluginList = (ParsePluginList)objectCache.getObject(ParsePluginList.class.getName());
+
     if (this.parsePluginList == null) {
       this.parsePluginList = new ParsePluginsReader().parse(conf);
       objectCache.setObject(ParsePluginList.class.getName(), this.parsePluginList);
@@ -121,8 +121,8 @@
     }
 
     parsers = new Vector<Parser>(parserExts.size());
-    for (Iterator i=parserExts.iterator(); i.hasNext(); ){
-      Extension ext = (Extension) i.next();
+    for (Iterator<Extension> i = parserExts.iterator(); i.hasNext(); ){
+      Extension ext = i.next();
       Parser p = null;
       try {
         //check to see if we've cached this parser instance yet
@@ -212,6 +212,7 @@
    * @return a list of extensions to be used for this contentType.
    *         If none, returns <code>null</code>.
    */
+  @SuppressWarnings("unchecked")
   protected List<Extension> getExtensions(String contentType) {
     
     ObjectCache objectCache = ObjectCache.get(conf);
@@ -411,5 +412,4 @@
   private Extension getExtensionFromAlias(Extension[] list, String id) {
     return getExtension(list, parsePluginList.getAliases().get(id));
   }
-
 }
Index: src/java/org/apache/nutch/parse/HTMLMetaTags.java
===================================================================
--- src/java/org/apache/nutch/parse/HTMLMetaTags.java	(revision 1480500)
+++ src/java/org/apache/nutch/parse/HTMLMetaTags.java	(working copy)
@@ -188,7 +188,7 @@
             + ", refreshHref=" + refreshHref + "\n"
             );
     sb.append(" * general tags:\n");
-    Iterator it = generalTags.keySet().iterator();
+    Iterator<Object> it = generalTags.keySet().iterator();
     while (it.hasNext()) {
       String key = (String)it.next();
       sb.append("   - " + key + "\t=\t" + generalTags.get(key) + "\n");
Index: src/java/org/apache/nutch/parse/ParseSegment.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseSegment.java	(revision 1480500)
+++ src/java/org/apache/nutch/parse/ParseSegment.java	(working copy)
@@ -42,7 +42,7 @@
 
 /* Parse content in a segment. */
 public class ParseSegment extends Configured implements Tool,
-    Mapper<WritableComparable, Content, Text, ParseImpl>,
+    Mapper<WritableComparable<?>, Content, Text, ParseImpl>,
     Reducer<Text, Writable, Text, Writable> {
 
   public static final Logger LOG = LoggerFactory.getLogger(ParseSegment.class);
@@ -71,7 +71,7 @@
   
   private Text newKey = new Text();
 
-  public void map(WritableComparable key, Content content,
+  public void map(WritableComparable<?> key, Content content,
                   OutputCollector<Text, ParseImpl> output, Reporter reporter)
     throws IOException {
     // convert on the fly from old UTF8 keys
Index: src/java/org/apache/nutch/util/PrefixStringMatcher.java
===================================================================
--- src/java/org/apache/nutch/util/PrefixStringMatcher.java	(revision 1480500)
+++ src/java/org/apache/nutch/util/PrefixStringMatcher.java	(working copy)
@@ -45,11 +45,11 @@
    * @throws ClassCastException if any <code>Object</code>s in the
    * collection are not <code>String</code>s
    */
-  public PrefixStringMatcher(Collection prefixes) {
+  public PrefixStringMatcher(Collection<String> prefixes) {
     super();
-    Iterator iter= prefixes.iterator();
+    Iterator<String> iter= prefixes.iterator();
     while (iter.hasNext())
-      addPatternForward((String)iter.next());
+      addPatternForward(iter.next());
   }
 
   /**
Index: src/java/org/apache/nutch/util/SuffixStringMatcher.java
===================================================================
--- src/java/org/apache/nutch/util/SuffixStringMatcher.java	(revision 1480500)
+++ src/java/org/apache/nutch/util/SuffixStringMatcher.java	(working copy)
@@ -41,11 +41,11 @@
    * <code>String</code>s with any suffix in the supplied
    * <code>Collection</code>
    */
-  public SuffixStringMatcher(Collection suffixes) {
+  public SuffixStringMatcher(Collection<String> suffixes) {
     super();
-    Iterator iter= suffixes.iterator();
+    Iterator<String> iter= suffixes.iterator();
     while (iter.hasNext())
-      addPatternBackward((String)iter.next());
+      addPatternBackward(iter.next());
   }
 
   /**
Index: src/java/org/apache/nutch/util/GenericWritableConfigurable.java
===================================================================
--- src/java/org/apache/nutch/util/GenericWritableConfigurable.java	(revision 1480500)
+++ src/java/org/apache/nutch/util/GenericWritableConfigurable.java	(working copy)
@@ -41,7 +41,7 @@
   @Override
   public void readFields(DataInput in) throws IOException {
     byte type = in.readByte();
-    Class clazz = getTypes()[type];
+    Class<?> clazz = getTypes()[type];
     try {
       set((Writable) clazz.newInstance());
     } catch (Exception e) {
Index: src/java/org/apache/nutch/metadata/HttpHeaders.java
===================================================================
--- src/java/org/apache/nutch/metadata/HttpHeaders.java	(revision 1480500)
+++ src/java/org/apache/nutch/metadata/HttpHeaders.java	(working copy)
@@ -23,9 +23,6 @@
  *
  * @see <a href="http://rfc-ref.org/RFC-TEXTS/2616/">Hypertext Transfer
  *      Protocol -- HTTP/1.1 (RFC 2616)</a>
- *
- * @author Chris Mattmann
- * @author J&eacute;r&ocirc;me Charron
  */
 public interface HttpHeaders {
 
Index: src/java/org/apache/nutch/metadata/Metadata.java
===================================================================
--- src/java/org/apache/nutch/metadata/Metadata.java	(revision 1480500)
+++ src/java/org/apache/nutch/metadata/Metadata.java	(working copy)
@@ -27,13 +27,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 
-
 /**
  * A multi-valued metadata container.
- *
- * @author Chris Mattmann
- * @author J&eacute;r&ocirc;me Charron
- *
  */
 public class Metadata implements Writable, CreativeCommons,
 DublinCore, HttpHeaders, Nutch, Feed {
@@ -128,7 +123,7 @@
    * @param properties properties to copy from
    */
   public void setAll(Properties properties) {
-    Enumeration names = properties.propertyNames();
+    Enumeration<?> names = properties.propertyNames();
     while (names.hasMoreElements()) {
       String name = (String) names.nextElement();
       metadata.put(name, new String[]{properties.getProperty(name)});
Index: src/java/org/apache/nutch/metadata/SpellCheckedMetadata.java
===================================================================
--- src/java/org/apache/nutch/metadata/SpellCheckedMetadata.java	(revision 1480500)
+++ src/java/org/apache/nutch/metadata/SpellCheckedMetadata.java	(working copy)
@@ -52,9 +52,9 @@
 
     // Uses following array to fill the metanames index and the
     // metanames list.
-    Class[] spellthese = {HttpHeaders.class};
+    Class<?>[] spellthese = {HttpHeaders.class};
 
-    for (Class spellCheckedNames : spellthese) {
+    for (Class<?> spellCheckedNames : spellthese) {
       for (Field field : spellCheckedNames.getFields()) {
         int mods = field.getModifiers();
         if (Modifier.isFinal(mods) && Modifier.isPublic(mods)
Index: src/java/org/apache/nutch/fetcher/OldFetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/OldFetcher.java	(revision 1480500)
+++ src/java/org/apache/nutch/fetcher/OldFetcher.java	(working copy)
@@ -45,7 +45,7 @@
 
 
 /** The fetcher. Most of the work is done by plugins. */
-public class OldFetcher extends Configured implements Tool, MapRunnable<WritableComparable, Writable, Text, NutchWritable> { 
+public class OldFetcher extends Configured implements Tool, MapRunnable<WritableComparable<?>, Writable, Text, NutchWritable> { 
 
   public static final Logger LOG = LoggerFactory.getLogger(OldFetcher.class);
   
@@ -55,12 +55,11 @@
 
   public static final String PROTOCOL_REDIR = "protocol";
 
-  public static class InputFormat extends SequenceFileInputFormat<WritableComparable, Writable> {
+  public static class InputFormat extends SequenceFileInputFormat<WritableComparable<?>, Writable> {
     /** Don't split inputs, to keep things polite. */
     public InputSplit[] getSplits(JobConf job, int nSplits)
       throws IOException {
       FileStatus[] files = listStatus(job);
-      FileSystem fs = FileSystem.get(job);
       InputSplit[] splits = new InputSplit[files.length];
       for (int i = 0; i < files.length; i++) {
         FileStatus cur = files[i];
@@ -71,7 +70,7 @@
     }
   }
 
-  private RecordReader<WritableComparable, Writable> input;
+  private RecordReader<WritableComparable<?>, Writable> input;
   private OutputCollector<Text, NutchWritable> output;
   private Reporter reporter;
 
@@ -458,7 +457,7 @@
     return conf.getBoolean("fetcher.store.content", true);
   }
 
-  public void run(RecordReader<WritableComparable, Writable> input, OutputCollector<Text, NutchWritable> output,
+  public void run(RecordReader<WritableComparable<?>, Writable> input, OutputCollector<Text, NutchWritable> output,
                   Reporter reporter) throws IOException {
 
     this.input = input;
Index: src/java/org/apache/nutch/net/URLNormalizers.java
===================================================================
--- src/java/org/apache/nutch/net/URLNormalizers.java	(revision 1480500)
+++ src/java/org/apache/nutch/net/URLNormalizers.java	(working copy)
@@ -101,7 +101,7 @@
   public static final Logger LOG = LoggerFactory.getLogger(URLNormalizers.class);
 
   /* Empty extension list for caching purposes. */
-  private final List<Extension> EMPTY_EXTENSION_LIST = Collections.EMPTY_LIST;
+  private final List<Extension> EMPTY_EXTENSION_LIST = Collections.<Extension>emptyList();
   
   private final URLNormalizer[] EMPTY_NORMALIZERS = new URLNormalizer[0];
 
@@ -194,6 +194,7 @@
    *         empty list.
    * @throws PluginRuntimeException
    */
+  @SuppressWarnings("unchecked")
   private List<Extension> getExtensions(String scope) {
     ObjectCache objectCache = ObjectCache.get(conf);
     List<Extension> extensions = 
Index: src/java/org/apache/nutch/plugin/PluginDescriptor.java
===================================================================
--- src/java/org/apache/nutch/plugin/PluginDescriptor.java	(revision 1480500)
+++ src/java/org/apache/nutch/plugin/PluginDescriptor.java	(working copy)
@@ -37,8 +37,6 @@
  * <code>ExtensionPoint</code> and <code>Extension</code>. To provide
  * access to the meta data of a plugin via a descriptor allow a lazy loading
  * mechanism.
- * 
- * @author joa23
  */
 public class PluginDescriptor {
   private String fPluginPath;
@@ -47,7 +45,7 @@
   private String fVersion;
   private String fName;
   private String fProviderName;
-  private HashMap fMessages = new HashMap();
+  private HashMap<String, ResourceBundle> fMessages = new HashMap<String, ResourceBundle>();
   private ArrayList<ExtensionPoint> fExtensionPoints = new ArrayList<ExtensionPoint>();
   private ArrayList<String> fDependencies = new ArrayList<String>();
   private ArrayList<URL> fExportedLibs = new ArrayList<URL>();
@@ -338,8 +336,7 @@
   public String getResourceString(String pKey, Locale pLocale)
       throws IOException {
     if (fMessages.containsKey(pLocale.toString())) {
-      ResourceBundle bundle = (ResourceBundle) fMessages
-          .get(pLocale.toString());
+      ResourceBundle bundle = fMessages.get(pLocale.toString());
       try {
         return bundle.getString(pKey);
       } catch (MissingResourceException e) {
Index: src/java/org/apache/nutch/plugin/PluginRepository.java
===================================================================
--- src/java/org/apache/nutch/plugin/PluginRepository.java	(revision 1480500)
+++ src/java/org/apache/nutch/plugin/PluginRepository.java	(working copy)
@@ -39,8 +39,6 @@
  * descriptor represents all meta information about a plugin. So a plugin
  * instance will be created later when it is required, this allow lazy plugin
  * loading.
- * 
- * @author joa23
  */
 public class PluginRepository {
   private static final WeakHashMap<String, PluginRepository> CACHE = new WeakHashMap<String, PluginRepository>();
@@ -267,8 +265,8 @@
       // Suggested by Stefan Groschupf <sg@media-style.com>
       synchronized (pDescriptor) {
         PluginClassLoader loader = pDescriptor.getClassLoader();
-        Class pluginClass = loader.loadClass(pDescriptor.getPluginClass());
-        Constructor constructor = pluginClass.getConstructor(new Class[] {
+        Class<?> pluginClass = loader.loadClass(pDescriptor.getPluginClass());
+        Constructor<?> constructor = pluginClass.getConstructor(new Class<?>[] {
             PluginDescriptor.class, Configuration.class });
         Plugin plugin = (Plugin) constructor.newInstance(new Object[] {
             pDescriptor, this.conf });
@@ -400,7 +398,7 @@
     }
     ClassLoader cl = d.getClassLoader();
     // args[1] - class name
-    Class clazz = null;
+    Class<?> clazz = null;
     try {
       clazz = Class.forName(args[1], true, cl);
     } catch (Exception e) {
@@ -410,7 +408,7 @@
     }
     Method m = null;
     try {
-      m = clazz.getMethod("main", new Class[] { args.getClass() });
+      m = clazz.getMethod("main", new Class<?>[] { args.getClass() });
     } catch (Exception e) {
       System.err.println("Could not find the 'main(String[])' method in class "
           + args[1] + ": " + e.getMessage());
Index: src/java/org/apache/nutch/plugin/Extension.java
===================================================================
--- src/java/org/apache/nutch/plugin/Extension.java	(revision 1480500)
+++ src/java/org/apache/nutch/plugin/Extension.java	(working copy)
@@ -25,8 +25,6 @@
  * An <code>Extension</code> is a kind of listener descriptor that will be
  * installed on a concrete <code>ExtensionPoint</code> that acts as kind of
  * Publisher.
- * 
- * @author joa23
  */
 public class Extension {
   private PluginDescriptor fDescriptor;
@@ -153,7 +151,7 @@
     synchronized (getId()) {
       try {
         PluginClassLoader loader = fDescriptor.getClassLoader();
-        Class extensionClazz = loader.loadClass(getClazz());
+        Class<?> extensionClazz = loader.loadClass(getClazz());
         // lazy loading of Plugin in case there is no instance of the plugin
         // already.
         this.pluginRepository.getPluginInstance(getDescriptor());
Index: src/java/org/apache/nutch/indexer/NutchField.java
===================================================================
--- src/java/org/apache/nutch/indexer/NutchField.java	(revision 1480500)
+++ src/java/org/apache/nutch/indexer/NutchField.java	(working copy)
@@ -28,8 +28,8 @@
 import org.apache.hadoop.io.*;
 
 /**
- * This class represents a multi-valued field with a weight. Values are arbitrary
- * objects.
+ * This class represents a multi-valued field with a weight. 
+ * Values are arbitrary objects.
  */
 public class NutchField implements Writable {
   private float weight;
@@ -44,7 +44,7 @@
   public NutchField(Object value, float weight) {
     this.weight = weight;
     if (value instanceof Collection) {
-      values.addAll((Collection<Object>)value);
+      values.addAll((Collection<?>)value);
     } else {
       values.add(value);
     }
Index: src/java/org/apache/nutch/tools/proxy/SegmentHandler.java
===================================================================
--- src/java/org/apache/nutch/tools/proxy/SegmentHandler.java	(revision 1480500)
+++ src/java/org/apache/nutch/tools/proxy/SegmentHandler.java	(working copy)
@@ -42,8 +42,6 @@
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.metadata.Nutch;
-import org.apache.nutch.parse.ParseData;
-import org.apache.nutch.parse.ParseText;
 import org.apache.nutch.protocol.Content;
 import org.apache.nutch.protocol.ProtocolStatus;
 import org.mortbay.jetty.Request;
@@ -88,9 +86,8 @@
   
   private static class Segment implements Closeable {
     
-    private static final Partitioner PARTITIONER = new HashPartitioner();
+    private static final Partitioner<Text,Writable> PARTITIONER = new HashPartitioner<Text,Writable>();
 
-    private FileSystem fs;
     private Path segmentDir;
 
     private Object cLock = new Object();
@@ -102,7 +99,6 @@
     private Configuration conf;
 
     public Segment(FileSystem fs, Path segmentDir, Configuration conf) throws IOException {
-      this.fs = fs;
       this.segmentDir = segmentDir;
       this.conf = conf;
     }
Index: src/java/org/apache/nutch/tools/ResolveUrls.java
===================================================================
--- src/java/org/apache/nutch/tools/ResolveUrls.java	(revision 1480500)
+++ src/java/org/apache/nutch/tools/ResolveUrls.java	(working copy)
@@ -77,7 +77,7 @@
         
         // get the address by name and if no error is thrown then it 
         // is resolved successfully
-        InetAddress ia = InetAddress.getByName(host);
+        InetAddress.getByName(host);
         LOG.info("Resolved: " + host);
         numResolved.incrementAndGet();
       }
@@ -161,19 +161,25 @@
   public static void main(String[] args) {
 
     Options options = new Options();
-    Option helpOpts = OptionBuilder.withArgName("help").withDescription(
-      "show this help message").create("help");
-    Option urlOpts = OptionBuilder.withArgName("urls").hasArg().withDescription(
-      "the urls file to check").create("urls");
-    Option numThreadOpts = OptionBuilder.withArgName("numThreads").hasArgs().withDescription(
-      "the number of threads to use").create("numThreads");
+    OptionBuilder.withArgName("help");
+    OptionBuilder.withDescription("show this help message");
+    Option helpOpts = OptionBuilder.create("help");
     options.addOption(helpOpts);
+
+    OptionBuilder.withArgName("urls");
+    OptionBuilder.hasArg();
+    OptionBuilder.withDescription("the urls file to check");
+    Option urlOpts = OptionBuilder.create("urls");
     options.addOption(urlOpts);
+
+    OptionBuilder.withArgName("numThreads");
+    OptionBuilder.hasArgs();
+    OptionBuilder.withDescription("the number of threads to use");
+    Option numThreadOpts = OptionBuilder.create("numThreads");
     options.addOption(numThreadOpts);
 
     CommandLineParser parser = new GnuParser();
     try {
-
       // parse out common line arguments
       CommandLine line = parser.parse(options, args);
       if (line.hasOption("help") || !line.hasOption("urls")) {
@@ -196,5 +202,4 @@
       LOG.error("ResolveUrls: " + StringUtils.stringifyException(e));
     }
   }
-
 }
Index: src/java/org/apache/nutch/tools/FreeGenerator.java
===================================================================
--- src/java/org/apache/nutch/tools/FreeGenerator.java	(revision 1480500)
+++ src/java/org/apache/nutch/tools/FreeGenerator.java	(working copy)
@@ -57,8 +57,6 @@
  * This tool generates fetchlists (segments to be fetched) from plain text
  * files containing one URL per line. It's useful when arbitrary URL-s need to
  * be fetched without adding them first to the CrawlDb, or during testing.
- * 
- * @author Andrzej Bialecki
  */
 public class FreeGenerator extends Configured implements Tool {
   private static final Logger LOG = LoggerFactory.getLogger(FreeGenerator.class);
@@ -67,7 +65,7 @@
   private static final String NORMALIZE_KEY = "free.generator.normalize";
 
   public static class FG extends MapReduceBase
-  implements Mapper<WritableComparable, Text, Text, Generator.SelectorEntry>,
+  implements Mapper<WritableComparable<?>, Text, Text, Generator.SelectorEntry>,
   Reducer<Text, Generator.SelectorEntry, Text, CrawlDatum> {
     private URLNormalizers normalizers = null;
     private URLFilters filters = null;
@@ -89,7 +87,7 @@
     
     Generator.SelectorEntry entry = new Generator.SelectorEntry();
 
-    public void map(WritableComparable key, Text value, OutputCollector<Text,
+    public void map(WritableComparable<?> key, Text value, OutputCollector<Text,
         Generator.SelectorEntry> output, Reporter reporter) throws IOException {
       // value is a line of text
       String urlString = value.toString();
Index: src/java/org/apache/nutch/segment/SegmentMerger.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMerger.java	(revision 1480500)
+++ src/java/org/apache/nutch/segment/SegmentMerger.java	(working copy)
@@ -16,6 +16,7 @@
  */
 package org.apache.nutch.segment;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -207,7 +208,7 @@
         MapFile.Writer pt_out = null;
         SequenceFile.Writer g_out = null;
         SequenceFile.Writer p_out = null;
-        HashMap sliceWriters = new HashMap();
+        HashMap<String, Closeable> sliceWriters = new HashMap<String, Closeable>();
         String segmentName = job.get("segment.merger.segmentName");
         
         public void write(Text key, MetaWrapper wrapper) throws IOException {
@@ -288,7 +289,7 @@
         }
 
         public void close(Reporter reporter) throws IOException {
-          Iterator<Object> it = sliceWriters.values().iterator();
+          Iterator<Closeable> it = sliceWriters.values().iterator();
           while (it.hasNext()) {
             Object o = it.next();
             if (o instanceof SequenceFile.Writer) {
Index: src/java/org/apache/nutch/segment/SegmentReader.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentReader.java	(revision 1480500)
+++ src/java/org/apache/nutch/segment/SegmentReader.java	(working copy)
@@ -80,10 +80,10 @@
   private FileSystem fs;
 
   public static class InputCompatMapper extends MapReduceBase implements
-      Mapper<WritableComparable, Writable, Text, NutchWritable> {
+      Mapper<WritableComparable<?>, Writable, Text, NutchWritable> {
     private Text newKey = new Text();
 
-    public void map(WritableComparable key, Writable value,
+    public void map(WritableComparable<?> key, Writable value,
         OutputCollector<Text, NutchWritable> collector, Reporter reporter) throws IOException {
       // convert on the fly from old formats with UTF8 keys.
       // UTF8 deprecated and replaced by Text.
@@ -98,8 +98,8 @@
 
   /** Implements a text output format */
   public static class TextOutputFormat extends
-      FileOutputFormat<WritableComparable, Writable> {
-    public RecordWriter<WritableComparable, Writable> getRecordWriter(
+      FileOutputFormat<WritableComparable<?>, Writable> {
+    public RecordWriter<WritableComparable<?>, Writable> getRecordWriter(
         final FileSystem fs, JobConf job,
         String name, final Progressable progress) throws IOException {
 
@@ -109,8 +109,8 @@
       if (fs.exists(segmentDumpFile)) fs.delete(segmentDumpFile, true);
 
       final PrintStream printStream = new PrintStream(fs.create(segmentDumpFile));
-      return new RecordWriter<WritableComparable, Writable>() {
-        public synchronized void write(WritableComparable key, Writable value) throws IOException {
+      return new RecordWriter<WritableComparable<?>, Writable>() {
+        public synchronized void write(WritableComparable<?> key, Writable value) throws IOException {
           printStream.println(value);
         }
 
@@ -379,8 +379,8 @@
   private List<Writable> getMapRecords(Path dir, Text key) throws Exception {
     MapFile.Reader[] readers = MapFileOutputFormat.getReaders(fs, dir, getConf());
     ArrayList<Writable> res = new ArrayList<Writable>();
-    Class keyClass = readers[0].getKeyClass();
-    Class valueClass = readers[0].getValueClass();
+    Class<?> keyClass = readers[0].getKeyClass();
+    Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
     Writable value = (Writable)valueClass.newInstance();
@@ -403,8 +403,8 @@
   private List<Writable> getSeqRecords(Path dir, Text key) throws Exception {
     SequenceFile.Reader[] readers = SequenceFileOutputFormat.getReaders(getConf(), dir);
     ArrayList<Writable> res = new ArrayList<Writable>();
-    Class keyClass = readers[0].getKeyClass();
-    Class valueClass = readers[0].getValueClass();
+    Class<?> keyClass = readers[0].getKeyClass();
+    Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
     Writable aKey = (Writable)keyClass.newInstance();
Index: src/java/org/apache/nutch/segment/SegmentMergeFilter.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMergeFilter.java	(revision 1480500)
+++ src/java/org/apache/nutch/segment/SegmentMergeFilter.java	(working copy)
@@ -18,7 +18,7 @@
 
 import java.util.Collection;
 
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.parse.ParseData;
 import org.apache.nutch.parse.ParseText;
@@ -41,7 +41,7 @@
    * @return <tt>true</tt> values for this <tt>key</tt> (URL) should be merged
    *         into the new segment.
    */
-  public boolean filter(WritableComparable key, CrawlDatum generateData,
+  public boolean filter(Text key, CrawlDatum generateData,
       CrawlDatum fetchData, CrawlDatum sigData, Content content,
       ParseData parseData, ParseText parseText, Collection<CrawlDatum> linked);
 }
Index: src/java/org/apache/nutch/segment/SegmentMergeFilters.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMergeFilters.java	(revision 1480500)
+++ src/java/org/apache/nutch/segment/SegmentMergeFilters.java	(working copy)
@@ -21,7 +21,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.net.URLFilter;
 import org.apache.nutch.parse.ParseData;
@@ -65,16 +65,14 @@
    * @return <tt>true</tt> values for this <tt>key</tt> (URL) should be merged
    *         into the new segment.
    */
-  public boolean filter(WritableComparable key, CrawlDatum generateData,
+  public boolean filter(Text key, CrawlDatum generateData,
       CrawlDatum fetchData, CrawlDatum sigData, Content content,
       ParseData parseData, ParseText parseText, Collection<CrawlDatum> linked) {
     for (SegmentMergeFilter filter : filters) {
       if (!filter.filter(key, generateData, fetchData, sigData, content,
           parseData, parseText, linked)) {
         if (LOG.isTraceEnabled())
-          LOG
-              .trace("Key " + key + " dropped by "
-                  + filter.getClass().getName());
+          LOG.trace("Key " + key + " dropped by " + filter.getClass().getName());
         return false;
       }
     }
Index: build.xml
===================================================================
--- build.xml	(revision 1480500)
+++ build.xml	(working copy)
@@ -57,6 +57,10 @@
     </fileset>
   </path>
 
+  <presetdef name="javac">
+    <javac includeantruntime="false" />
+  </presetdef>
+
   <!-- ====================================================== -->
   <!-- Stuff needed by all targets                            -->
   <!-- ====================================================== -->
@@ -94,7 +98,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg value="-Xlint"/>
+      <compilerarg value="-Xlint:-path"/>
       <classpath refid="classpath"/>
     </javac>    
   </target>
@@ -341,7 +345,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg value="-Xlint"/>
+      <compilerarg value="-Xlint:-path"/>
       <classpath refid="test.classpath"/>
     </javac>    
   </target>
