Index: conf/nutch-default.xml
===================================================================
--- conf/nutch-default.xml	(revision 1156835)
+++ conf/nutch-default.xml	(working copy)
@@ -730,6 +730,15 @@
 <!-- AnchorIndexing filter plugin properties -->
 
 <property>
+  <name>moreIndexingFilter.indexMimeTypeParts</name>
+  <value>true</value>
+  <description>Determines whether the index-more plugin will split the mime-type
+  into sub parts; this requires the type field to be multi-valued. Set to true
+  for backward compatibility. If false, the mime-type is not split.
+  </description>
+</property>
+
+<property>
   <name>anchorIndexingFilter.deduplicate</name>
   <value>false</value>
 <description>With this enabled the indexer will case-insensitively deduplicate anchors
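
For clarity, a minimal sketch of the splitting behaviour that the new
moreIndexingFilter.indexMimeTypeParts property gates. The MoreIndexingFilter
change itself is not part of this diff, so the helper below is illustrative
only:

    // Illustrative only: how index-more could split a mime-type into sub parts
    // when moreIndexingFilter.indexMimeTypeParts=true. With a multi-valued
    // "type" field, "application/pdf" yields "application", "pdf" and
    // "application/pdf"; with the property set to false, only the full
    // mime-type would be indexed.
    private static String[] mimeTypeParts(String mimeType) {
      String[] parts = mimeType.split("/", 2);
      if (parts.length < 2) {
        return new String[] { mimeType };
      }
      return new String[] { parts[0], parts[1], mimeType };
    }
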
@@ -1060,6 +1069,16 @@
   </description>
 </property>
 
+<property>
+  <name>solr.auth</name>
+  <value>false</value>
+  <description>
+  Whether to enable HTTP basic authentication for communicating with Solr.
+  Use the solr.auth.username and solr.auth.password properties to configure
+  your credentials.
+  </description>
+</property>
+
 <!-- storage properties -->
 
 <property>
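
The code changes below all route server construction through
SolrUtils.getCommonsHttpSolrServer(conf), but SolrUtils.java itself is not
included in this diff. A minimal sketch of what that helper would look like,
assuming SolrJ 3.x, whose CommonsHttpSolrServer is backed by Apache Commons
HttpClient 3.x:

    import java.net.MalformedURLException;

    import org.apache.commons.httpclient.HttpClient;
    import org.apache.commons.httpclient.UsernamePasswordCredentials;
    import org.apache.commons.httpclient.auth.AuthScope;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;

    public class SolrUtils {

      public static CommonsHttpSolrServer getCommonsHttpSolrServer(Configuration conf)
          throws MalformedURLException {
        CommonsHttpSolrServer solr =
            new CommonsHttpSolrServer(conf.get(SolrConstants.SERVER_URL));
        // Attach HTTP basic credentials only when solr.auth=true.
        if (conf.getBoolean(SolrConstants.USE_AUTH, false)) {
          HttpClient client = solr.getHttpClient();
          client.getParams().setAuthenticationPreemptive(true);
          client.getState().setCredentials(AuthScope.ANY,
              new UsernamePasswordCredentials(conf.get(SolrConstants.USERNAME),
                  conf.get(SolrConstants.PASSWORD)));
        }
        return solr;
      }
    }
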
Index: src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java	(revision 1156835)
+++ src/java/org/apache/nutch/indexer/solr/SolrDeleteDuplicates.java	(working copy)
@@ -45,7 +45,6 @@
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
@@ -229,7 +228,7 @@
     throws IOException, InterruptedException {
       Configuration conf = context.getConfiguration();
       int numSplits = context.getNumReduceTasks();
-      SolrServer solr = new CommonsHttpSolrServer(conf.get(SolrConstants.SERVER_URL));
+      SolrServer solr = SolrUtils.getCommonsHttpSolrServer(conf);
 
       final SolrQuery solrQuery = new SolrQuery(SOLR_GET_ALL_QUERY);
       solrQuery.setFields(SolrConstants.ID_FIELD);
@@ -259,7 +258,7 @@
     public RecordReader<Text, SolrRecord> createRecordReader(InputSplit split,
         TaskAttemptContext context) throws IOException, InterruptedException {
       Configuration conf = context.getConfiguration();
-      SolrServer solr = new CommonsHttpSolrServer(conf.get(SolrConstants.SERVER_URL));
+      SolrServer solr = SolrUtils.getCommonsHttpSolrServer(conf);
       SolrInputSplit solrSplit = (SolrInputSplit) split;
       final int numDocs = (int) solrSplit.getLength();
       
@@ -304,7 +303,7 @@
   public void setup(Context job) throws IOException {
     Configuration conf = job.getConfiguration();
     try {
-      solr = new CommonsHttpSolrServer(conf.get(SolrConstants.SERVER_URL));
+      solr = SolrUtils.getCommonsHttpSolrServer(conf);
     } catch (MalformedURLException e) {
       throw new IOException(e);
     }
@@ -340,6 +339,7 @@
         updateRequest.deleteById(solrRecord.id);
       }
       numDeletes++;
+      context.getCounter("SolrDedupStatus", "Deleted documents").increment(1);
       if (numDeletes >= NUM_MAX_DELETE_REQUEST) {
         try {
           updateRequest.process(solr);
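
The new "Deleted documents" counter surfaces in the standard job counters. A
hedged sketch of reading it once the dedup job completes, assuming a handle to
the finished org.apache.hadoop.mapreduce.Job:

    // Hypothetical: inspect the counter incremented above after the job has run.
    long deleted = job.getCounters()
        .findCounter("SolrDedupStatus", "Deleted documents").getValue();
    System.out.println("SolrDeleteDuplicates removed " + deleted + " documents");
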
Index: src/java/org/apache/nutch/indexer/solr/SolrIndexerJob.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrIndexerJob.java	(revision 1156835)
+++ src/java/org/apache/nutch/indexer/solr/SolrIndexerJob.java	(working copy)
@@ -37,7 +37,6 @@
 import org.apache.nutch.util.NutchTool;
 import org.apache.nutch.util.ToolUtil;
 import org.apache.solr.client.solrj.SolrServer;
-import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
 
 public class SolrIndexerJob extends IndexerJob {
 
@@ -68,7 +67,7 @@
           Nutch.ARG_SOLR, solrUrl,
           Nutch.ARG_BATCH, batchId));
       // do the commits once and for all the reducers in one go
-      SolrServer solr = new CommonsHttpSolrServer(solrUrl);
+      SolrServer solr = SolrUtils.getCommonsHttpSolrServer(getConf());
       solr.commit();
     } finally {
       FileSystem.get(getConf()).delete(
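
Note that getCommonsHttpSolrServer reads the server URL from the Configuration
rather than taking it as an argument, so solr.server.url must be present in
getConf() before the commit. A hedged sketch of how the caller can guarantee
that, assuming the SolrUtils sketched above:

    // Ensure the URL the old constructor received directly is available to
    // SolrUtils via the Configuration before committing.
    getConf().set(SolrConstants.SERVER_URL, solrUrl);
    SolrServer solr = SolrUtils.getCommonsHttpSolrServer(getConf());
    solr.commit();
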
Index: src/java/org/apache/nutch/indexer/solr/SolrConstants.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrConstants.java	(revision 1156835)
+++ src/java/org/apache/nutch/indexer/solr/SolrConstants.java	(working copy)
@@ -24,7 +24,13 @@
   public static final String COMMIT_SIZE = SOLR_PREFIX + "commit.size";
 
   public static final String MAPPING_FILE = SOLR_PREFIX + "mapping.file";
-  
+
+  public static final String USE_AUTH = SOLR_PREFIX + "auth";
+
+  public static final String USERNAME = SOLR_PREFIX + "auth.username";
+
+  public static final String PASSWORD = SOLR_PREFIX + "auth.password";
+
   public static final String ID_FIELD = "id";
   
   public static final String URL_FIELD = "url";
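
Since SOLR_PREFIX is "solr.", the new constants resolve to the property names
documented in nutch-default.xml above. A short usage sketch:

    // USE_AUTH -> "solr.auth", USERNAME -> "solr.auth.username",
    // PASSWORD -> "solr.auth.password"
    boolean useAuth = conf.getBoolean(SolrConstants.USE_AUTH, false);
    String username = conf.get(SolrConstants.USERNAME);
    String password = conf.get(SolrConstants.PASSWORD);
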
Index: src/java/org/apache/nutch/indexer/solr/SolrWriter.java
===================================================================
--- src/java/org/apache/nutch/indexer/solr/SolrWriter.java	(revision 1156835)
+++ src/java/org/apache/nutch/indexer/solr/SolrWriter.java	(working copy)
@@ -21,6 +21,8 @@
 import java.util.List;
 import java.util.Map.Entry;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.nutch.indexer.NutchDocument;
@@ -32,6 +34,8 @@
 
 public class SolrWriter implements NutchIndexWriter {
 
+  public static final Logger LOG = LoggerFactory.getLogger(SolrWriter.class);
+
   private SolrServer solr;
   private SolrMappingReader solrMapping;
 
@@ -44,7 +48,7 @@
   public void open(TaskAttemptContext job, String name)
   throws IOException {
     Configuration conf = job.getConfiguration();
-    solr = new CommonsHttpSolrServer(conf.get(SolrConstants.SERVER_URL));
+    solr = SolrUtils.getCommonsHttpSolrServer(conf);
     commitSize = conf.getInt(SolrConstants.COMMIT_SIZE, 1000);
     solrMapping = SolrMappingReader.getInstance(conf);
   }
@@ -54,10 +58,16 @@
     final SolrInputDocument inputDoc = new SolrInputDocument();
     for(final Entry<String, List<String>> e : doc) {
       for (final String val : e.getValue()) {
-        inputDoc.addField(solrMapping.mapKey(e.getKey()), val);
+
+        String val2 = val;
+        if (e.getKey().equals("content")) {
+          val2 = SolrUtils.stripNonCharCodepoints(val);
+        }
+        }
+
+        inputDoc.addField(solrMapping.mapKey(e.getKey()), val2);
         String sCopy = solrMapping.mapCopyKey(e.getKey());
         if (sCopy != e.getKey()) {
-        	inputDoc.addField(sCopy, val);
+        	inputDoc.addField(sCopy, val2);
         }
       }
     }
@@ -65,6 +75,7 @@
     inputDocs.add(inputDoc);
     if (inputDocs.size() >= commitSize) {
       try {
+        LOG.info("Adding " + Integer.toString(inputDocs.size()) + " documents");
         solr.add(inputDocs);
       } catch (final SolrServerException e) {
         throw new IOException(e);
@@ -77,6 +88,7 @@
   public void close() throws IOException {
     try {
       if (!inputDocs.isEmpty()) {
+        LOG.info("Adding " + Integer.toString(inputDocs.size()) + " documents");
         solr.add(inputDocs);
         inputDocs.clear();
       }
@@ -84,5 +96,4 @@
       throw new IOException(e);
     }
   }
-
 }
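
SolrWriter now calls SolrUtils.stripNonCharCodepoints on content values, but
that method is also outside this diff. A sketch of an implementation that
drops Unicode non-characters and control characters (other than tab, LF and
CR), which would otherwise make Solr reject the document:

    // Illustrative implementation: remove Unicode non-characters
    // (U+FDD0..U+FDEF, and U+xxFFFE/U+xxFFFF in every plane) plus control
    // characters other than tab, newline and carriage return.
    public static String stripNonCharCodepoints(String input) {
      StringBuilder retval = new StringBuilder(input.length());
      for (int i = 0; i < input.length(); i++) {
        char ch = input.charAt(i);
        if (ch % 0x10000 != 0xffff &&      // U+FFFF, U+1FFFF, ...
            ch % 0x10000 != 0xfffe &&      // U+FFFE, U+1FFFE, ...
            (ch <= 0xfdd0 || ch >= 0xfdef) &&
            (ch > 0x1f || ch == 0x9 || ch == 0xa || ch == 0xd)) {
          retval.append(ch);
        }
      }
      return retval.toString();
    }
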
Index: src/java/org/apache/nutch/indexer/IndexerReducer.java
===================================================================
--- src/java/org/apache/nutch/indexer/IndexerReducer.java	(revision 1156835)
+++ src/java/org/apache/nutch/indexer/IndexerReducer.java	(working copy)
@@ -73,11 +73,15 @@
       doc = filters.filter(doc, url, page);
     } catch (IndexingException e) {
       LOG.warn("Error indexing "+key+": "+e);
+      context.getCounter("IndexerStatus", "Errors").increment(1);
       return;
     }
 
     // skip documents discarded by indexing filters
-    if (doc == null) return;
+    if (doc == null) {
+      context.getCounter("IndexerStatus", "Skipped by filters").increment(1);
+      return;
+    }
 
     float boost = 1.0f;
     // run scoring filters
@@ -97,6 +101,7 @@
       Mark.INDEX_MARK.putMark(page, Mark.UPDATEDB_MARK.checkMark(page));
       store.put(key, page);
     }
+    context.getCounter("SolrDedupStatus", "Documents added").increment(1);
     context.write(key, doc);
   }
 
