Index: src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java
===================================================================
--- src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(revision 1479031)
+++ src/plugin/lib-http/src/java/org/apache/nutch/protocol/http/api/HttpBase.java	(working copy)
@@ -131,9 +131,7 @@
   public Configuration getConf() {
     return this.conf;
   }
-   
   
-  
   public ProtocolOutput getProtocolOutput(Text url, CrawlDatum datum) {
     
     String urlString = url.toString();
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpRobotRulesParser.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpRobotRulesParser.java	(revision 0)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpRobotRulesParser.java	(working copy)
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nutch.protocol.ftp;
+
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.nutch.crawl.CrawlDatum;
+import org.apache.nutch.protocol.Protocol;
+import org.apache.nutch.protocol.ProtocolOutput;
+import org.apache.nutch.protocol.ProtocolStatus;
+import org.apache.nutch.protocol.RobotRulesParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import crawlercommons.robots.BaseRobotRules;
+import crawlercommons.robots.SimpleRobotRules;
+
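+/**
+ * This class parses robots.txt files for URLs fetched over the FTP protocol.
+ * It extends the generic {@link RobotRulesParser} class and supplies the
+ * FTP-specific logic for retrieving the robots file.
+ */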
+public class FtpRobotRulesParser extends RobotRulesParser {
+
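+  // robots.txt fetched over FTP is handed to the parser as plain text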
+  private static final String CONTENT_TYPE = "text/plain";
+  public static final Logger LOG = LoggerFactory.getLogger(FtpRobotRulesParser.class);
+
+  FtpRobotRulesParser() { }
+
+  public FtpRobotRulesParser(Configuration conf) {
+    super(conf);
+  }
+
+  /**
+   * For hosts whose robots rules are not yet cached, this method sends an
+   * FTP request for the robots.txt file to the host of the given {@link URL},
+   * parses the rules, and caches the resulting rules object to avoid
+   * re-fetching in the future.
+   *
+   * @param ftp The {@link Protocol} object
+   * @param url URL to check
+   *
+   * @return robotRules A {@link BaseRobotRules} object for the rules
+   */
+  public BaseRobotRules getRobotRulesSet(Protocol ftp, URL url) {
+
+    String protocol = url.getProtocol().toLowerCase();  // normalize to lower case
+    String host = url.getHost().toLowerCase();          // normalize to lower case
+
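+    // rules for a host are cached under the key "protocol:host"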
+    BaseRobotRules robotRules = (SimpleRobotRules) CACHE.get(protocol + ":" + host);
+
+    boolean cacheRule = true;
+
+    if (robotRules == null) {                     // cache miss
+
+      if (LOG.isTraceEnabled())
+        LOG.trace("cache miss " + url);
+
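+      // fetch /robots.txt from the host via the FTP protocol implementation itself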
+      try {
+        Text robotsUrl = new Text(new URL(url, "/robots.txt").toString());
+        ProtocolOutput output = ((Ftp)ftp).getProtocolOutput(robotsUrl, new CrawlDatum());
+        ProtocolStatus status = output.getStatus();
+
+        if (status.getCode() == ProtocolStatus.SUCCESS) {
+          robotRules = parseRules(url.toString(), output.getContent().getContent(),
+                                  CONTENT_TYPE, agentNames);
+        } else {                                       
+          robotRules = EMPTY_RULES;                 // use default rules
+        }
+      } catch (Throwable t) {
+        if (LOG.isInfoEnabled()) {
+          LOG.info("Couldn't get robots.txt for " + url + ": " + t.toString());
+        }
+        cacheRule = false;
+        robotRules = EMPTY_RULES;
+      }
+
+      if (cacheRule)
+        CACHE.put(protocol + ":" + host, robotRules);  // cache rules for host
+    }
+    return robotRules;
+  }
+}
Index: src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
===================================================================
--- src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(revision 1479031)
+++ src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java	(working copy)
@@ -29,11 +29,9 @@
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.nutch.protocol.Content;
-import org.apache.nutch.protocol.RobotRulesParser;
 import org.apache.nutch.protocol.Protocol;
 import org.apache.nutch.protocol.ProtocolOutput;
 import org.apache.nutch.protocol.ProtocolStatus;
-
 import crawlercommons.robots.BaseRobotRules;
 
 import java.net.URL;
@@ -84,9 +82,12 @@
 
   private Configuration conf;
 
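+  // protocol-specific robots.txt parser; receives its configuration in setConf()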
+  private FtpRobotRulesParser robots = null;
 
   // constructor
   public Ftp() {
+    robots = new FtpRobotRulesParser();
   }
 
   /** Set the timeout. */
@@ -240,6 +240,7 @@
     this.serverTimeout = conf.getInt("ftp.server.timeout", 60 * 1000);
     this.keepConnection = conf.getBoolean("ftp.keep.connection", false);
     this.followTalk = conf.getBoolean("ftp.follow.talk", false);
+    this.robots.setConf(conf);
   }
 
   /**
@@ -250,12 +251,10 @@
   }
 
   /** 
-   * Currently, no robots parsing is done for ftp protocol 
-   * and this returns a set of empty rules which will allow every url.
-   * There a jira logged for the same NUTCH-1513
+   * Get the robots rules for a given URL by delegating to {@link FtpRobotRulesParser}
    */
   public BaseRobotRules getRobotRules(Text url, CrawlDatum datum) {
-    return RobotRulesParser.EMPTY_RULES;
+    return robots.getRobotRulesSet(this, url);
   }
 }
 
