Index: src/java/org/apache/nutch/crawl/URLPartitioner.java
===================================================================
--- src/java/org/apache/nutch/crawl/URLPartitioner.java	(revision 1450695)
+++ src/java/org/apache/nutch/crawl/URLPartitioner.java	(working copy)
@@ -43,7 +43,8 @@
   public static final String PARTITION_MODE_IP = "byIP";
 
   private int seed;
-  private URLNormalizers normalizers;
+  private boolean normalize = true;
+  private URLNormalizers normalizers = null;
   private String mode = PARTITION_MODE_HOST;
 
   public void configure(JobConf job) {
@@ -55,7 +56,10 @@
       LOG.error("Unknown partition mode : " + mode + " - forcing to byHost");
       mode = PARTITION_MODE_HOST;
     }
-    normalizers = new URLNormalizers(job, URLNormalizers.SCOPE_PARTITION);
+    normalize = job.getBoolean(Generator.GENERATOR_NORMALISE, true);
+    if (normalize) {
+      normalizers = new URLNormalizers(job, URLNormalizers.SCOPE_PARTITION);
+    }
   }
 
   public void close() {}
@@ -66,7 +70,9 @@
     URL url = null;
     int hashCode = urlString.hashCode();
     try {
-      urlString = normalizers.normalize(urlString, URLNormalizers.SCOPE_PARTITION);
+      if (normalize) {
+        urlString = normalizers.normalize(urlString, URLNormalizers.SCOPE_PARTITION);
+      }
       url = new URL(urlString);
       hashCode = url.getHost().hashCode();
     } catch (MalformedURLException e) {
Index: src/java/org/apache/nutch/crawl/Generator.java
===================================================================
--- src/java/org/apache/nutch/crawl/Generator.java	(revision 1450695)
+++ src/java/org/apache/nutch/crawl/Generator.java	(working copy)
@@ -563,7 +563,7 @@
         Path subfetchlist = stat.getPath();
         if (!subfetchlist.getName().startsWith("fetchlist-")) continue;
         // start a new partition job for this segment
-        Path newSeg = partitionSegment(fs, segments, subfetchlist, numLists);
+        Path newSeg = partitionSegment(fs, segments, subfetchlist, numLists, norm);
         generatedSegments.add(newSeg);
       }
     } catch (Exception e) {
@@ -620,9 +620,14 @@
     Path[] patharray = new Path[generatedSegments.size()];
     return generatedSegments.toArray(patharray);
   }
+
+  private Path partitionSegment(FileSystem fs, Path segmentsDir, Path inputDir,
+      int numLists) throws IOException {
+    return partitionSegment(fs, segmentsDir, inputDir, numLists, true);
+  }
 
   private Path partitionSegment(FileSystem fs, Path segmentsDir, Path inputDir,
-      int numLists) throws IOException {
+      int numLists, boolean norm) throws IOException {
     // invert again, partition by host/domain/IP, sort by url hash
     if (LOG.isInfoEnabled()) {
       LOG.info("Generator: Partitioning selected urls for politeness.");
@@ -636,6 +641,7 @@
     job.setJobName("generate: partition " + segment);
 
     job.setInt("partition.url.seed", new Random().nextInt());
+    job.setBoolean(GENERATOR_NORMALISE, norm);
 
     FileInputFormat.addInputPath(job, inputDir);
     job.setInputFormat(SequenceFileInputFormat.class);
Index: src/java/org/apache/nutch/tools/FreeGenerator.java
===================================================================
--- src/java/org/apache/nutch/tools/FreeGenerator.java	(revision 1450695)
+++ src/java/org/apache/nutch/tools/FreeGenerator.java	(working copy)
@@ -57,12 +57,12 @@
  * This tool generates fetchlists (segments to be fetched) from plain text
  * files containing one URL per line. It's useful when arbitrary URL-s need to
  * be fetched without adding them first to the CrawlDb, or during testing.
- * 
+ *
  * @author Andrzej Bialecki
  */
 public class FreeGenerator extends Configured implements Tool {
   private static final Logger LOG = LoggerFactory.getLogger(FreeGenerator.class);
-  
+
   private static final String FILTER_KEY = "free.generator.filter";
   private static final String NORMALIZE_KEY = "free.generator.normalize";
 
@@ -74,10 +74,12 @@
     private ScoringFilters scfilters;
     private CrawlDatum datum = new CrawlDatum();
     private Text url = new Text();
+    private int defaultInterval = 0;
 
     @Override
     public void configure(JobConf job) {
       super.configure(job);
+      defaultInterval = job.getInt("db.fetch.interval.default", 2592000);
       scfilters = new ScoringFilters(job);
       if (job.getBoolean(FILTER_KEY, false)) {
         filters = new URLFilters(job);
@@ -86,7 +88,7 @@
         normalizers = new URLNormalizers(job, URLNormalizers.SCOPE_INJECT);
       }
     }
-    
+
     Generator.SelectorEntry entry = new Generator.SelectorEntry();
 
     public void map(WritableComparable key, Text value, OutputCollector<Text,
@@ -116,6 +118,8 @@
       }
       entry.datum = datum;
       entry.url = url;
+      // https://issues.apache.org/jira/browse/NUTCH-1430
+      entry.datum.setFetchInterval(defaultInterval);
       output.collect(url, entry);
     }
 
@@ -124,7 +128,7 @@
       // pick unique urls from values - discard the reduce key due to hash collisions
       HashMap<Text, CrawlDatum> unique = new HashMap<Text, CrawlDatum>();
       while (values.hasNext()) {
-        Generator.SelectorEntry entry = values.next();
+        Generator.SelectorEntry entry = (Generator.SelectorEntry)values.next();
         unique.put(entry.url, entry.datum);
       }
       // output unique urls
@@ -133,7 +137,7 @@
       }
     }
   }
-  
+
   public int run(String[] args) throws Exception {
     if (args.length < 2) {
       System.err.println("Usage: FreeGenerator <inputDir> <segmentsDir> [-filter] [-normalize]");
@@ -158,14 +162,15 @@
         }
       }
     }
-    
+
     SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     long start = System.currentTimeMillis();
     LOG.info("FreeGenerator: starting at " + sdf.format(start));
 
     JobConf job = new NutchJob(getConf());
     job.setBoolean(FILTER_KEY, filter);
     job.setBoolean(NORMALIZE_KEY, normalize);
+    job.setBoolean(Generator.GENERATOR_NORMALISE, normalize);
     FileInputFormat.addInputPath(job, new Path(args[0]));
     job.setInputFormat(TextInputFormat.class);
     job.setMapperClass(FG.class);
