Index: src/java/org/apache/nutch/indexer/solr/SolrIndexer.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrIndexer.java	(revision 1147191)
+++ src/java/org/apache/nutch/indexer/solr/SolrIndexer.java	(working copy)
@@ -100,19 +100,22 @@
 
   public int run(String[] args) throws Exception {
-    if (args.length < 4) {
+    if (args.length < 3) {
-      System.err.println("Usage: SolrIndexer <solr url> <crawldb> <linkdb> (<segment> ... | -dir <segments>) [-noCommit]");
+      System.err.println("Usage: SolrIndexer <solr url> <crawldb> [-linkdb <linkdb>] (<segment> ... | -dir <segments>) [-noCommit]");
       return -1;
     }
 
     final Path crawlDb = new Path(args[1]);
-    final Path linkDb = new Path(args[2]);
+    Path linkDb = null;
 
     final List<Path> segments = new ArrayList<Path>();
 
     boolean noCommit = false;
 
-    for (int i = 3; i < args.length; i++) {
-      if (args[i].equals("-dir")) {
+    for (int i = 2; i < args.length; i++) {
+      if (args[i].equals("-linkdb")) {
+        linkDb = new Path(args[++i]);
+      }
+      else if (args[i].equals("-dir")) {
         Path dir = new Path(args[++i]);
         FileSystem fs = dir.getFileSystem(getConf());
         FileStatus[] fstats = fs.listStatus(dir,
Index: src/java/org/apache/nutch/indexer/IndexerMapReduce.java
===================================================================
--- src/java/org/apache/nutch/indexer/IndexerMapReduce.java	(revision 1147191)
+++ src/java/org/apache/nutch/indexer/IndexerMapReduce.java	(working copy)
@@ -23,6 +23,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -172,7 +173,9 @@
                            JobConf job) {
 
     LOG.info("IndexerMapReduce: crawldb: " + crawlDb);
-    LOG.info("IndexerMapReduce: linkdb: " + linkDb);
+    if (linkDb != null) {
+      LOG.info("IndexerMapReduce: linkdb: " + linkDb);
+    }
 
     for (final Path segment : segments) {
       LOG.info("IndexerMapReduces: adding segment: " + segment);
@@ -183,7 +186,10 @@
     }
 
     FileInputFormat.addInputPath(job, new Path(crawlDb, CrawlDb.CURRENT_NAME));
-    FileInputFormat.addInputPath(job, new Path(linkDb, LinkDb.CURRENT_NAME));
+
+    if (linkDb != null) {
+      FileInputFormat.addInputPath(job, new Path(linkDb, LinkDb.CURRENT_NAME));
+    }
     job.setInputFormat(SequenceFileInputFormat.class);
 
     job.setMapperClass(IndexerMapReduce.class);
