diff --git a/conf/nutch-default.xml b/conf/nutch-default.xml
index 87c405883e..709da16cd1 100644
--- a/conf/nutch-default.xml
+++ b/conf/nutch-default.xml
@@ -2596,6 +2596,15 @@ visit https://wiki.apache.org/nutch/SimilarityScoringFilter-->
+
+<property>
+  <name>hostdb.deltaExpression</name>
+  <value></value>
+  <description>The expression used to calculate delta statistics, i.e. the differences between the HostDatum values after the HostDb update (currentHostDatum) and before it (previousHostDatum). The return value, in KeyValuePair(String,Number) or KeyValuePair(String,String) format, is written to the metadata of the HostDb.
+  For example, {return new ("javafx.util.Pair","FetchedDelta", currentHostDatum.fetched - previousHostDatum.fetched);}
+  </description>
+</property>
+
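For context (not part of the patch), a minimal sketch of how the new property might be overridden in nutch-site.xml, assuming the HostDb updater binds currentHostDatum and previousHostDatum when evaluating the expression, as described above; the concrete expression is only an illustration taken from the property description:

  <!-- Hypothetical nutch-site.xml override; the expression restates the example
       from the description and returns a Pair("FetchedDelta", <numeric delta>)
       that is written to the host's metadata. -->
  <property>
    <name>hostdb.deltaExpression</name>
    <value>{return new ("javafx.util.Pair", "FetchedDelta", currentHostDatum.fetched - previousHostDatum.fetched);}</value>
  </property>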
diff --git a/src/java/org/apache/nutch/hostdb/ReadHostDb.java b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
index eac3bf6455..010327ebd6 100644
--- a/src/java/org/apache/nutch/hostdb/ReadHostDb.java
+++ b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
@@ -21,46 +21,41 @@
import java.text.SimpleDateFormat;
import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.commons.jexl2.Expression;
+import org.apache.commons.jexl2.JexlContext;
+import org.apache.commons.jexl2.MapContext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.nutch.util.NutchConfiguration;
-import org.apache.nutch.util.StringUtil;
import org.apache.nutch.util.TimingUtil;
-import org.apache.nutch.util.URLUtil;
-
-import org.apache.commons.jexl2.JexlContext;
-import org.apache.commons.jexl2.Expression;
-import org.apache.commons.jexl2.JexlEngine;
-import org.apache.commons.jexl2.MapContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
- * @see Commons
+ * @see Commons
*/
public class ReadHostDb extends Configured implements Tool {
- private static final Logger LOG = LoggerFactory
- .getLogger(MethodHandles.lookup().lookupClass());
+ private static final Logger LOG = LoggerFactory.getLogger(MethodHandles
+ .lookup().lookupClass());
public static final String HOSTDB_DUMP_HEADER = "hostdb.dump.field.header";
public static final String HOSTDB_DUMP_HOSTNAMES = "hostdb.dump.hostnames";
@@ -75,33 +70,33 @@ static class ReadHostDbMapper extends Mapper<Text, HostDatum, Text, Text> {
protected Expression expr = null;
public void setup(Context context) {
- dumpHomepages = context.getConfiguration().getBoolean(HOSTDB_DUMP_HOMEPAGES, false);
- dumpHostnames = context.getConfiguration().getBoolean(HOSTDB_DUMP_HOSTNAMES, false);
- fieldHeader = context.getConfiguration().getBoolean(HOSTDB_DUMP_HEADER, true);
+ dumpHomepages = context.getConfiguration().getBoolean(
+ HOSTDB_DUMP_HOMEPAGES, false);
+ dumpHostnames = context.getConfiguration().getBoolean(
+ HOSTDB_DUMP_HOSTNAMES, false);
+ fieldHeader = context.getConfiguration().getBoolean(HOSTDB_DUMP_HEADER,
+ true);
String expr = context.getConfiguration().get(HOSTDB_FILTER_EXPRESSION);
if (expr != null) {
- // Create or retrieve a JexlEngine
- JexlEngine jexl = new JexlEngine();
-
- // Dont't be silent and be strict
- jexl.setSilent(true);
- jexl.setStrict(true);
-
- // Create an expression object
- this.expr = jexl.createExpression(expr);
+ this.expr = org.apache.nutch.util.JexlUtil.parseExpression(expr);
}
}
- public void map(Text key, HostDatum datum, Context context) throws IOException, InterruptedException {
+ public void map(Text key, HostDatum datum, Context context)
+ throws IOException, InterruptedException {
if (fieldHeader && !dumpHomepages && !dumpHostnames) {
- context.write(new Text("hostname"), new Text("unfetched\tfetched\tgone\tredirTemp\tredirPerm\tredirSum\tok\tnumRecords\tdnsFail\tcnxFail\tsumFail\tscore\tlastCheck\thomepage\tmetadata"));
+ context
+ .write(
+ new Text("hostname"),
+ new Text(
+ "unfetched\tfetched\tgone\tredirTemp\tredirPerm\tredirSum\tok\tnumRecords\tdnsFail\tcnxFail\tsumFail\tscore\tlastCheck\thomepage\tmetadata"));
fieldHeader = false;
}
-
+
if (expr != null) {
// Create a context and add data
JexlContext jcontext = new MapContext();
-
+
// Set some fixed variables
jcontext.set("unfetched", datum.getUnfetched());
jcontext.set("fetched", datum.getFetched());
@@ -114,24 +109,25 @@ public void map(Text key, HostDatum datum, Context context) throws IOException,
jcontext.set("numRecords", datum.numRecords());
jcontext.set("dnsFailures", datum.getDnsFailures());
jcontext.set("connectionFailures", datum.getConnectionFailures());
-
+
// Set metadata variables
- for (Map.Entry<Writable, Writable> entry : datum.getMetaData().entrySet()) {
+ for (Map.Entry<Writable, Writable> entry : datum.getMetaData()
+ .entrySet()) {
Object value = entry.getValue();
-
+
if (value instanceof FloatWritable) {
- FloatWritable fvalue = (FloatWritable)value;
- Text tkey = (Text)entry.getKey();
+ FloatWritable fvalue = (FloatWritable) value;
+ Text tkey = (Text) entry.getKey();
jcontext.set(tkey.toString(), fvalue.get());
}
-
+
if (value instanceof IntWritable) {
- IntWritable ivalue = (IntWritable)value;
- Text tkey = (Text)entry.getKey();
+ IntWritable ivalue = (IntWritable) value;
+ Text tkey = (Text) entry.getKey();
jcontext.set(tkey.toString(), ivalue.get());
}
}
-
+
// Filter this record if evaluation did not pass
try {
if (!Boolean.TRUE.equals(expr.evaluate(jcontext))) {
@@ -141,35 +137,38 @@ public void map(Text key, HostDatum datum, Context context) throws IOException,
LOG.info(e.toString() + " for " + key.toString());
}
}
-
+
if (dumpHomepages) {
if (datum.hasHomepageUrl()) {
context.write(new Text(datum.getHomepageUrl()), emptyText);
}
return;
}
-
+
if (dumpHostnames) {
context.write(key, emptyText);
return;
}
-
+
// Write anyway
context.write(key, new Text(datum.toString()));
}
}
- // Todo, reduce unknown hosts to single unknown domain if possible. Enable via configuration
+ // Todo, reduce unknown hosts to single unknown domain if possible. Enable via
+ // configuration
// host_a.example.org,host_a.example.org ==> example.org
-// static class ReadHostDbReducer extends Reduce {
-// public void setup(Context context) { }
-//
-// public void reduce(Text domain, Iterable<Text> hosts, Context context) throws IOException, InterruptedException {
-//
-// }
-// }
-
- private void readHostDb(Path hostDb, Path output, boolean dumpHomepages, boolean dumpHostnames, String expr) throws Exception {
+ // static class ReadHostDbReducer extends Reduce {
+ // public void setup(Context context) { }
+ //
+ // public void reduce(Text domain, Iterable<Text> hosts, Context context)
+ // throws IOException, InterruptedException {
+ //
+ // }
+ // }
+
+ private void readHostDb(Path hostDb, Path output, boolean dumpHomepages,
+ boolean dumpHostnames, String expr) throws Exception {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
long start = System.currentTimeMillis();
LOG.info("ReadHostDb: starting at " + sdf.format(start));
@@ -182,7 +181,7 @@ private void readHostDb(Path hostDb, Path output, boolean dumpHomepages, boolean
}
conf.setBoolean("mapreduce.fileoutputcommitter.marksuccessfuljobs", false);
conf.set("mapred.textoutputformat.separator", "\t");
-
+
Job job = Job.getInstance(conf);
job.setJobName("ReadHostDb");
job.setJarByClass(ReadHostDb.class);
@@ -217,22 +216,24 @@ private void readHostDb(Path hostDb, Path output, boolean dumpHomepages, boolean
}
long end = System.currentTimeMillis();
- LOG.info("ReadHostDb: finished at " + sdf.format(end) + ", elapsed: " + TimingUtil.elapsedTime(start, end));
+ LOG.info("ReadHostDb: finished at " + sdf.format(end) + ", elapsed: "
+ + TimingUtil.elapsedTime(start, end));
}
-
+
private void getHostDbRecord(Path hostDb, String host) throws Exception {
Configuration conf = getConf();
- SequenceFile.Reader[] readers = SequenceFileOutputFormat.getReaders(conf, hostDb);
+ SequenceFile.Reader[] readers = SequenceFileOutputFormat.getReaders(conf,
+ hostDb);
Class<?> keyClass = readers[0].getKeyClass();
Class<?> valueClass = readers[0].getValueClass();
-
+
if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
throw new IOException("Incompatible key (" + keyClass.getName() + ")");
-
+
Text key = (Text) keyClass.newInstance();
HostDatum value = (HostDatum) valueClass.newInstance();
-
+
for (int i = 0; i < readers.length; i++) {
while (readers[i].next(key, value)) {
if (host.equals(key.toString())) {
@@ -240,17 +241,19 @@ private void getHostDbRecord(Path hostDb, String host) throws Exception {
}
}
readers[i].close();
- }
+ }
}
public static void main(String args[]) throws Exception {
- int res = ToolRunner.run(NutchConfiguration.create(), new ReadHostDb(), args);
+ int res = ToolRunner.run(NutchConfiguration.create(), new ReadHostDb(),
+ args);
System.exit(res);
}
public int run(String[] args) throws Exception {
if (args.length < 2) {
- System.err.println("Usage: ReadHostDb [-get ] [