Nice work gents!

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Chris Mattmann, Ph.D.
Chief Architect
Instrument Software and Science Data Systems Section (398)
NASA Jet Propulsion Laboratory Pasadena, CA 91109 USA
Office: 168-519, Mailstop: 168-527
Email: [email protected]
WWW: http://sunset.usc.edu/~mattmann/
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Adjunct Associate Professor, Computer Science Department
University of Southern California, Los Angeles, CA 90089 USA
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-----Original Message-----
From: "[email protected]" <[email protected]>
Reply-To: "[email protected]" <[email protected]>
Date: Tuesday, March 31, 2015 at 12:28 PM
To: "[email protected]" <[email protected]>
Subject: svn commit: r1670442 - /nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java

>Author: snagel
>Date: Tue Mar 31 19:28:14 2015
>New Revision: 1670442
>
>URL: http://svn.apache.org/r1670442
>Log:
>NUTCH-1979 CrawlDbReader to implement Tool: fix unit test
>
>Modified:
>    nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
>
>Modified: nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
>URL: http://svn.apache.org/viewvc/nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java?rev=1670442&r1=1670441&r2=1670442&view=diff
>==============================================================================
>--- nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java (original)
>+++ nutch/trunk/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java Tue Mar 31 19:28:14 2015
>@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.io.MapFile;
> import org.apache.hadoop.io.Text;
>+import org.apache.hadoop.mapred.JobConf;
> import org.apache.nutch.util.NutchConfiguration;
> import org.junit.After;
> import org.junit.Assert;
>@@ -113,7 +114,7 @@ public class TestCrawlDbMerger {
>       String url = it.next();
>       LOG.fine("url=" + url);
>       CrawlDatum cd = expected.get(url);
>-      CrawlDatum res = reader.get(crawlDb, url, conf);
>+      CrawlDatum res = reader.get(crawlDb, url, new JobConf(conf));
>       LOG.fine(" -> " + res);
>       System.out.println("url=" + url);
>       System.out.println(" cd " + cd);
>
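For anyone with call sites outside the test: the diff shows that CrawlDbReader.get now takes a Hadoop JobConf as its third argument, so a plain Configuration has to be wrapped before the call. A minimal sketch of the new call shape follows; it assumes the String-path form of get, a no-arg CrawlDbReader constructor, and illustrative variable names, so treat it as a sketch rather than a drop-in snippet.

    // Illustrative sketch (not part of the commit): look up a single URL
    // in a crawldb, wrapping the Nutch Configuration in a JobConf as the
    // updated unit test does after NUTCH-1979.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.nutch.crawl.CrawlDatum;
    import org.apache.nutch.crawl.CrawlDbReader;
    import org.apache.nutch.util.NutchConfiguration;

    public class CrawlDbLookupSketch {
      public static void main(String[] args) throws Exception {
        String crawlDb = args[0];   // path to the crawldb directory (assumed String form)
        String url = args[1];       // URL to look up
        Configuration conf = NutchConfiguration.create();
        CrawlDbReader reader = new CrawlDbReader();
        // Third argument is now a JobConf, mirroring the change in the test.
        CrawlDatum res = reader.get(crawlDb, url, new JobConf(conf));
        System.out.println(url + " -> " + res);
      }
    }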

