Index: src/test/org/apache/nutch/parse/TestParserFactory.java
===================================================================
--- src/test/org/apache/nutch/parse/TestParserFactory.java	(revision 1432381)
+++ src/test/org/apache/nutch/parse/TestParserFactory.java	(working copy)
@@ -49,11 +49,11 @@
     
   /** Unit test for <code>getExtensions(String)</code> method. */
   public void testGetExtensions() throws Exception {
-    Extension ext = (Extension)parserFactory.getExtensions("text/html").get(0);
+    Extension ext = parserFactory.getExtensions("text/html").get(0);
     assertEquals("parse-tika", ext.getDescriptor().getPluginId());
-    ext = (Extension) parserFactory.getExtensions("text/html; charset=ISO-8859-1").get(0);
+    ext = parserFactory.getExtensions("text/html; charset=ISO-8859-1").get(0);
     assertEquals("parse-tika", ext.getDescriptor().getPluginId());
-    ext = (Extension)parserFactory.getExtensions("foo/bar").get(0);
+    ext = parserFactory.getExtensions("foo/bar").get(0);
     assertEquals("parse-tika", ext.getDescriptor().getPluginId());
   }
   
Index: src/java/org/apache/nutch/crawl/Generator.java
===================================================================
--- src/java/org/apache/nutch/crawl/Generator.java	(revision 1432381)
+++ src/java/org/apache/nutch/crawl/Generator.java	(working copy)
@@ -201,7 +201,7 @@
       }
       float sort = 1.0f;
       try {
-        sort = scfilters.generatorSortValue((Text) key, crawlDatum, sort);
+        sort = scfilters.generatorSortValue(key, crawlDatum, sort);
       } catch (ScoringFilterException sfe) {
         if (LOG.isWarnEnabled()) {
           LOG.warn("Couldn't filter generatorSortValue for " + key + ": " + sfe);
@@ -222,7 +222,7 @@
       // record generation time
       crawlDatum.getMetaData().put(Nutch.WRITABLE_GENERATE_TIME_KEY, genTime);
       entry.datum = crawlDatum;
-      entry.url = (Text) key;
+      entry.url = key;
       output.collect(sortValue, entry); // invert for sort by score
     }
 
@@ -345,7 +345,7 @@
 
     public void map(FloatWritable key, SelectorEntry value,
         OutputCollector<Text,SelectorEntry> output, Reporter reporter) throws IOException {
-      SelectorEntry entry = (SelectorEntry) value;
+      SelectorEntry entry = value;
       output.collect(entry.url, entry);
     }
   }
Index: src/java/org/apache/nutch/crawl/CrawlDbReader.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReader.java	(revision 1432381)
+++ src/java/org/apache/nutch/crawl/CrawlDbReader.java	(working copy)
@@ -175,10 +175,10 @@
     public void reduce(Text key, Iterator<LongWritable> values, OutputCollector<Text, LongWritable> output, Reporter reporter)
         throws IOException {
       val.set(0L);
-      String k = ((Text)key).toString();
+      String k = key.toString();
       if (!k.equals("s")) {
         while (values.hasNext()) {
-          LongWritable cnt = (LongWritable)values.next();
+          LongWritable cnt = values.next();
           val.set(val.get() + cnt.get());
         }
         output.collect(key, val);
@@ -187,7 +187,7 @@
         long min = Long.MAX_VALUE;
         long max = Long.MIN_VALUE;
         while (values.hasNext()) {
-          LongWritable cnt = (LongWritable)values.next();
+          LongWritable cnt = values.next();
           if (cnt.get() < min) min = cnt.get();
           if (cnt.get() > max) max = cnt.get();
           total += cnt.get();
@@ -205,40 +205,40 @@
     public void reduce(Text key, Iterator<LongWritable> values, OutputCollector<Text, LongWritable> output, Reporter reporter)
             throws IOException {
 
-      String k = ((Text) key).toString();
+      String k = key.toString();
       if (k.equals("T")) {
         // sum all values for this key
         long sum = 0;
         while (values.hasNext()) {
-          sum += ((LongWritable) values.next()).get();
+          sum += values.next().get();
         }
         // output sum
         output.collect(key, new LongWritable(sum));
       } else if (k.startsWith("status") || k.startsWith("retry")) {
         LongWritable cnt = new LongWritable();
         while (values.hasNext()) {
-          LongWritable val = (LongWritable)values.next();
+          LongWritable val = values.next();
           cnt.set(cnt.get() + val.get());
         }
         output.collect(key, cnt);
       } else if (k.equals("scx")) {
         LongWritable cnt = new LongWritable(Long.MIN_VALUE);
         while (values.hasNext()) {
-          LongWritable val = (LongWritable)values.next();
+          LongWritable val = values.next();
           if (cnt.get() < val.get()) cnt.set(val.get());
         }
         output.collect(key, cnt);
       } else if (k.equals("scn")) {
         LongWritable cnt = new LongWritable(Long.MAX_VALUE);
         while (values.hasNext()) {
-          LongWritable val = (LongWritable)values.next();
+          LongWritable val = values.next();
           if (cnt.get() > val.get()) cnt.set(val.get());
         }
         output.collect(key, cnt);
       } else if (k.equals("sct")) {
         LongWritable cnt = new LongWritable();
         while (values.hasNext()) {
-          LongWritable val = (LongWritable)values.next();
+          LongWritable val = values.next();
           cnt.set(cnt.get() + val.get());
         }
         output.collect(key, cnt);
@@ -356,9 +356,9 @@
         String k = entry.getKey();
         LongWritable val = entry.getValue();
         if (k.equals("scn")) {
-          LOG.info("min score:\t" + (float) (val.get() / 1000.0f));
+          LOG.info("min score:\t" + (val.get() / 1000.0f));
         } else if (k.equals("scx")) {
-          LOG.info("max score:\t" + (float) (val.get() / 1000.0f));
+          LOG.info("max score:\t" + (val.get() / 1000.0f));
         } else if (k.equals("sct")) {
           LOG.info("avg score:\t" + (float) ((((double)val.get()) / totalCnt.get()) / 1000.0));
         } else if (k.startsWith("status")) {
Index: src/java/org/apache/nutch/crawl/CrawlDbReducer.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDbReducer.java	(revision 1432381)
+++ src/java/org/apache/nutch/crawl/CrawlDbReducer.java	(working copy)
@@ -75,7 +75,7 @@
     org.apache.hadoop.io.MapWritable metaFromParse = null;
     
     while (values.hasNext()) {
-      CrawlDatum datum = (CrawlDatum)values.next();
+      CrawlDatum datum = values.next();
       if (!multiple && values.hasNext()) multiple = true;
       if (CrawlDatum.hasDbStatus(datum)) {
         if (!oldSet) {
@@ -184,10 +184,10 @@
       if (oldSet) {                          // if old exists
         result.set(old);                          // use it
       } else {
-        result = schedule.initializeSchedule((Text)key, result);
+        result = schedule.initializeSchedule(key, result);
         result.setStatus(CrawlDatum.STATUS_DB_UNFETCHED);
         try {
-          scfilters.initialScore((Text)key, result);
+          scfilters.initialScore(key, result);
         } catch (ScoringFilterException e) {
           if (LOG.isWarnEnabled()) {
             LOG.warn("Cannot filter init score for url " + key +
@@ -216,7 +216,7 @@
         }
       }
       // set the schedule
-      result = schedule.setFetchSchedule((Text)key, result, prevFetchTime,
+      result = schedule.setFetchSchedule(key, result, prevFetchTime,
           prevModifiedTime, fetch.getFetchTime(), fetch.getModifiedTime(), modified);
       // set the result status and signature
       if (modified == FetchSchedule.STATUS_NOTMODIFIED) {
@@ -254,7 +254,7 @@
       // NOTMODIFIED state, when the old fetched copy was already removed with
       // old segments.
       if (maxInterval < result.getFetchInterval())
-        result = schedule.forceRefetch((Text)key, result, false);
+        result = schedule.forceRefetch(key, result, false);
       break;
     case CrawlDatum.STATUS_SIGNATURE:
       if (LOG.isWarnEnabled()) {
@@ -265,7 +265,7 @@
       if (oldSet) {
         result.setSignature(old.getSignature());  // use old signature
       }
-      result = schedule.setPageRetrySchedule((Text)key, result, prevFetchTime,
+      result = schedule.setPageRetrySchedule(key, result, prevFetchTime,
           prevModifiedTime, fetch.getFetchTime());
       if (result.getRetriesSinceFetch() < retryMax) {
         result.setStatus(CrawlDatum.STATUS_DB_UNFETCHED);
@@ -278,7 +278,7 @@
       if (oldSet)
         result.setSignature(old.getSignature());  // use old signature
       result.setStatus(CrawlDatum.STATUS_DB_GONE);
-      result = schedule.setPageGoneSchedule((Text)key, result, prevFetchTime,
+      result = schedule.setPageGoneSchedule(key, result, prevFetchTime,
           prevModifiedTime, fetch.getFetchTime());
       break;
 
@@ -287,7 +287,7 @@
     }
 
     try {
-      scfilters.updateDbScore((Text)key, oldSet ? old : null, result, linkList);
+      scfilters.updateDbScore(key, oldSet ? old : null, result, linkList);
     } catch (Exception e) {
       if (LOG.isWarnEnabled()) {
         LOG.warn("Couldn't update score, key=" + key + ": " + e);
Index: src/java/org/apache/nutch/crawl/CrawlDatum.java
===================================================================
--- src/java/org/apache/nutch/crawl/CrawlDatum.java	(revision 1432381)
+++ src/java/org/apache/nutch/crawl/CrawlDatum.java	(working copy)
@@ -457,7 +457,10 @@
     int res = 0;
     if (signature != null) {
-      for (int i = 0; i < signature.length / 4; i += 4) {
-        res ^= (int)(signature[i] << 24 + signature[i+1] << 16 +
-                signature[i+2] << 8 + signature[i+3]);
+      // NOTE(review): '+' binds tighter than '<<' in Java, so the old
+      // unparenthesized form evaluated as signature[i] << (24 + ...).
+      // The old loop bound (i < length / 4 with step 4) also folded in
+      // only the first quarter of the signature bytes.
+      for (int i = 0; i + 3 < signature.length; i += 4) {
+        res ^= (signature[i] << 24) + (signature[i+1] << 16) +
+                (signature[i+2] << 8) + signature[i+3];
       }
     }
Index: src/java/org/apache/nutch/parse/ParseSegment.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseSegment.java	(revision 1432381)
+++ src/java/org/apache/nutch/parse/ParseSegment.java	(working copy)
@@ -180,7 +180,7 @@
   public void reduce(Text key, Iterator<Writable> values,
                      OutputCollector<Text, Writable> output, Reporter reporter)
     throws IOException {
-    output.collect(key, (Writable)values.next()); // collect first value
+    output.collect(key, values.next()); // collect first value
   }
 
   public void parse(Path segment) throws IOException {
Index: src/java/org/apache/nutch/parse/ParseOutputFormat.java
===================================================================
--- src/java/org/apache/nutch/parse/ParseOutputFormat.java	(revision 1432381)
+++ src/java/org/apache/nutch/parse/ParseOutputFormat.java	(working copy)
@@ -251,7 +251,7 @@
 
           try {
             // compute score contributions and adjustment to the original score
-            adjust = scfilters.distributeScoreToOutlinks((Text)key, parseData, 
+            adjust = scfilters.distributeScoreToOutlinks(key, parseData, 
                       targets, null, links.length);
           } catch (ScoringFilterException e) {
             LOG.warn("Cannot distribute score from " + key + ": " + e.getMessage());
Index: src/java/org/apache/nutch/fetcher/Fetcher.java
===================================================================
--- src/java/org/apache/nutch/fetcher/Fetcher.java	(revision 1432381)
+++ src/java/org/apache/nutch/fetcher/Fetcher.java	(working copy)
@@ -1079,7 +1079,7 @@
             }
 
             // Overwrite the outlinks in ParseData with the normalized and filtered set
-            parseData.setOutlinks((Outlink[])outlinkList.toArray(new Outlink[outlinkList.size()]));
+            parseData.setOutlinks(outlinkList.toArray(new Outlink[outlinkList.size()]));
 
             output.collect(url, new NutchWritable(
                     new ParseImpl(new ParseText(parse.getText()),
Index: src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java	(revision 1432381)
+++ src/java/org/apache/nutch/indexer/solr/SolrMappingReader.java	(working copy)
@@ -122,21 +122,21 @@
 
   public String hasCopy(String key) {
     if (copyMap.containsKey(key)) {
-      key = (String) copyMap.get(key);
+      key = copyMap.get(key);
     }
     return key;
   }
 
   public String mapKey(String key) throws IOException {
     if(keyMap.containsKey(key)) {
-      key = (String) keyMap.get(key);
+      key = keyMap.get(key);
     }
     return key;
   }
 
   public String mapCopyKey(String key) throws IOException {
     if(copyMap.containsKey(key)) {
-      key = (String) copyMap.get(key);
+      key = copyMap.get(key);
     }
     return key;
   }
Index: src/java/org/apache/nutch/tools/arc/ArcRecordReader.java
===================================================================
--- src/java/org/apache/nutch/tools/arc/ArcRecordReader.java	(revision 1432381)
+++ src/java/org/apache/nutch/tools/arc/ArcRecordReader.java	(working copy)
@@ -122,14 +122,14 @@
    * Creates a new instance of the <code>Text</code> object for the key.
    */
   public Text createKey() {
-    return (Text)ReflectionUtils.newInstance(Text.class, conf);
+    return ReflectionUtils.newInstance(Text.class, conf);
   }
 
   /**
    * Creates a new instance of the <code>BytesWritable</code> object for the key
    */
   public BytesWritable createValue() {
-    return (BytesWritable)ReflectionUtils.newInstance(BytesWritable.class, conf);
+    return ReflectionUtils.newInstance(BytesWritable.class, conf);
   }
 
   /**
@@ -264,9 +264,9 @@
         System.arraycopy(content, eol + 1, raw, 0, raw.length);
         
         // populate key and values with the header and raw content.
-        Text keyText = (Text)key;
+        Text keyText = key;
         keyText.set(header);
-        BytesWritable valueBytes = (BytesWritable)value;
+        BytesWritable valueBytes = value;
         valueBytes.set(raw, 0, raw.length);
 
         // TODO: It would be best to start at the end of the gzip read but 
Index: src/java/org/apache/nutch/tools/FreeGenerator.java
===================================================================
--- src/java/org/apache/nutch/tools/FreeGenerator.java	(revision 1432381)
+++ src/java/org/apache/nutch/tools/FreeGenerator.java	(working copy)
@@ -124,7 +124,7 @@
       // pick unique urls from values - discard the reduce key due to hash collisions
       HashMap<Text, CrawlDatum> unique = new HashMap<Text, CrawlDatum>();
       while (values.hasNext()) {
-        Generator.SelectorEntry entry = (Generator.SelectorEntry)values.next();
+        Generator.SelectorEntry entry = values.next();
         unique.put(entry.url, entry.datum);
       }
       // output unique urls
Index: src/java/org/apache/nutch/segment/SegmentMerger.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentMerger.java	(revision 1432381)
+++ src/java/org/apache/nutch/segment/SegmentMerger.java	(working copy)
@@ -213,7 +213,7 @@
         public void write(Text key, MetaWrapper wrapper) throws IOException {
           // unwrap
           SegmentPart sp = SegmentPart.parse(wrapper.getMeta(SEGMENT_PART_KEY));
-          Writable o = (Writable)wrapper.get();
+          Writable o = wrapper.get();
           String slice = wrapper.getMeta(SEGMENT_SLICE_KEY);
           if (o instanceof CrawlDatum) {
             if (sp.partName.equals(CrawlDatum.GENERATE_DIR_NAME)) {
Index: src/java/org/apache/nutch/segment/SegmentReader.java
===================================================================
--- src/java/org/apache/nutch/segment/SegmentReader.java	(revision 1432381)
+++ src/java/org/apache/nutch/segment/SegmentReader.java	(working copy)
@@ -237,7 +237,7 @@
       writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(fs.create(dumpFile))));
       try {
         for (int i = 0; i < files.length; i++) {
-          Path partFile = (Path) files[i];
+          Path partFile = files[i];
           try {
             currentRecordNumber = append(fs, job, partFile, writer, currentRecordNumber);
           } catch (IOException exception) {
Index: src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
===================================================================
--- src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java	(revision 1432381)
+++ src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java	(working copy)
@@ -49,8 +49,8 @@
       throws IOException {
       sequenceFileRecordReader = new SequenceFileRecordReader<Text, Content>(
         conf, split);
-      innerKey = (Text)sequenceFileRecordReader.createKey();
-      innerValue = (Content)sequenceFileRecordReader.createValue();
+      innerKey = sequenceFileRecordReader.createKey();
+      innerValue = sequenceFileRecordReader.createValue();
     }
 
     public Text createKey() {
Index: src/java/org/apache/nutch/scoring/webgraph/Loops.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/Loops.java	(revision 1432381)
+++ src/java/org/apache/nutch/scoring/webgraph/Loops.java	(working copy)
@@ -348,7 +348,7 @@
         ObjectWritable next = values.next();
         Object value = next.get();
         if (value instanceof Route) {
-          routeList.add((Route)WritableUtils.clone((Route)value, conf));
+          routeList.add(WritableUtils.clone((Route)value, conf));
         }
         else if (value instanceof Text) {
           String outlinkUrl = ((Text)value).toString();
Index: src/java/org/apache/nutch/scoring/webgraph/WebGraph.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(revision 1432381)
+++ src/java/org/apache/nutch/scoring/webgraph/WebGraph.java	(working copy)
@@ -345,7 +345,7 @@
           if (mostRecent == 0L || mostRecent < timestamp) {
             mostRecent = timestamp;
           }
-          outlinkList.add((LinkDatum)WritableUtils.clone(next, conf));
+          outlinkList.add(WritableUtils.clone(next, conf));
           reporter.incrCounter("WebGraph.outlinks", "added links", 1);
         }
         else if (value instanceof BooleanWritable) {
Index: src/java/org/apache/nutch/scoring/webgraph/LinkRank.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(revision 1432381)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkRank.java	(working copy)
@@ -338,7 +338,7 @@
       throws IOException {
 
       String url = key.toString();
-      Node outNode = (Node)WritableUtils.clone(node, conf);
+      Node outNode = WritableUtils.clone(node, conf);
       outNode.setInlinkScore(initialScore);
 
       output.collect(new Text(url), outNode);
@@ -397,7 +397,7 @@
           node = (Node)obj;
         }
         else if (obj instanceof LinkDatum) {
-          outlinks.add((LinkDatum)WritableUtils.clone((LinkDatum)obj, conf));
+          outlinks.add(WritableUtils.clone((LinkDatum)obj, conf));
         }
         else if (obj instanceof LoopSet) {
           loops = (LoopSet)obj;
@@ -555,7 +555,7 @@
         + numInlinks + " iteration: " + itNum);
 
       // store the score in a temporary NodeDb
-      Node outNode = (Node)WritableUtils.clone(node, conf);
+      Node outNode = WritableUtils.clone(node, conf);
       outNode.setInlinkScore(linkRankScore);
       output.collect(key, outNode);
     }
Index: src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java
===================================================================
--- src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(revision 1432381)
+++ src/java/org/apache/nutch/scoring/webgraph/LinkDumper.java	(working copy)
@@ -265,7 +265,7 @@
           node = (Node)obj;
         }
         else if (obj instanceof LinkDatum) {
-          outlinks.add((LinkDatum)WritableUtils.clone((LinkDatum)obj, conf));
+          outlinks.add(WritableUtils.clone((LinkDatum)obj, conf));
         }
         else if (obj instanceof LoopSet) {
           loops = (LoopSet)obj;
@@ -323,7 +323,7 @@
       while (values.hasNext()) {
         LinkNode cur = values.next();
         if (numNodes < maxInlinks) {
-          nodeList.add((LinkNode)WritableUtils.clone(cur, conf));
+          nodeList.add(WritableUtils.clone(cur, conf));
           numNodes++;
         }
         else {
