Author: tpalsulich
Date: Mon Dec 29 18:46:41 2014
New Revision: 1648407

URL: http://svn.apache.org/r1648407
Log:
OODT-762. Fix crawler directory layout.

Added:
    oodt/trunk/crawler/src/test/java/
    oodt/trunk/crawler/src/test/java/org/
    oodt/trunk/crawler/src/test/java/org/apache/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/StateAwareProductCrawler.java
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/TestProductCrawler.java
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/TestCrawlerBeansPropHandler.java
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/TestBooleanOptions.java
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/
    oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/TestMimeExtractorConfigReader.java

Added: oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/StateAwareProductCrawler.java
URL: http://svn.apache.org/viewvc/oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/StateAwareProductCrawler.java?rev=1648407&view=auto
==============================================================================
--- oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/StateAwareProductCrawler.java (added)
+++ oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/StateAwareProductCrawler.java Mon Dec 29 18:46:41 2014
@@ -0,0 +1,166 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oodt.cas.crawl;
+
+//JDK imports
+import java.io.File;
+
+//OODT imports
+import org.apache.oodt.cas.metadata.Metadata;
+
+/**
+ * {@link ProductCrawler} used for testing the appropriated states are
+ * entered for each part of the crawler's workflow.
+ *
+ * @author bfoster (Brian Foster)
+ */
+public class StateAwareProductCrawler extends ProductCrawler{
+
+   private boolean passPreconditions = true;
+   private boolean passExtraction = true;
+   private boolean passRenaming = true;
+   private boolean passRequiredMetadata = true;
+   private boolean passPreIngestActions = true;
+   private boolean passIngest = true;
+
+   private boolean ranPreconditions = false;
+   private boolean ranExtraction = false;
+   private boolean ranRenaming = false;
+   private boolean ranRequiredMetadata = false;
+   private boolean ranPreIngestActions = false;
+   private boolean ranIngest = false;
+   private boolean ranPostIngestSuccessActions = false;
+   private boolean ranPostIngestFailActions = false;
+
+   public void markFailPreconditions() {
+      passPreconditions = false;
+   }
+   
+   public void markFailExtraction() {
+      passExtraction = false;
+   }
+
+   public void markFailRenaming() {
+      passRenaming = false;
+   }
+
+   public void markFailRequiredMetadata() {
+      passRequiredMetadata = false;
+   }
+
+   public void markFailPreIngestActions() {
+      passPreIngestActions = false;
+   }
+
+   public void markSkipIngest() {
+      this.setSkipIngest(true);
+   }
+
+   public void markFailIngest() {
+      passIngest = false;
+   }
+
+   public boolean ranPreconditions() {
+      return ranPreconditions;
+   }
+
+   public boolean ranExtraction() {
+      return ranExtraction;
+   }
+
+   public boolean ranRenaming() {
+      return ranRenaming;
+   }
+
+   public boolean ranRequiredMetadata() {
+      return ranRequiredMetadata;
+   }
+
+   public boolean ranPreIngestActions() {
+      return ranPreIngestActions;
+   }
+
+   public boolean ranIngest() {
+      return ranIngest;
+   }
+
+   public boolean ranPostIngestSuccessActions() {
+      return ranPostIngestSuccessActions;
+   }
+
+   public boolean ranPostIngestFailActions() {
+      return ranPostIngestFailActions;
+   }
+
+   @Override
+   protected boolean passesPreconditions(File p) {
+      ranPreconditions = true;
+      return passPreconditions;
+   }
+
+   @Override
+   protected Metadata getMetadataForProduct(File p)
+      throws Exception {
+      ranExtraction = true;
+      if (passExtraction) {
+         return new Metadata();
+      } else {
+         throw new Exception("Failed Extraction");
+      }
+   }
+
+   @Override
+   protected File renameProduct(File p, Metadata m)
+         throws Exception {
+      ranRenaming = true;
+      if (passRenaming) {
+         return p;
+      } else {
+         throw new Exception("Failed Renaming");
+      }
+   }
+   
+   @Override
+   boolean containsRequiredMetadata(Metadata m) {
+      ranRequiredMetadata = true;
+      return passRequiredMetadata;
+   }
+
+   @Override
+   boolean performPreIngestActions(File p, Metadata m) {
+      ranPreIngestActions = true;
+      return passPreIngestActions;
+   }
+
+   @Override
+   boolean ingest(File p, Metadata m) {
+      ranIngest = true;
+      return passIngest;
+   }
+
+   @Override
+   boolean performPostIngestOnSuccessActions(File p, Metadata m) {
+      ranPostIngestSuccessActions = true;
+      return true;
+   }
+
+   @Override
+   boolean performPostIngestOnFailActions(File p, Metadata m) {
+      ranPostIngestFailActions = true;
+      return true;
+   }
+}

Added: oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/TestProductCrawler.java
URL: http://svn.apache.org/viewvc/oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/TestProductCrawler.java?rev=1648407&view=auto
==============================================================================
--- oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/TestProductCrawler.java (added)
+++ oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/TestProductCrawler.java Mon Dec 29 18:46:41 2014
@@ -0,0 +1,627 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oodt.cas.crawl;
+
+//EasyMock static imports
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+//JDK imports
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collections;
+
+//OODT imports
+import org.apache.oodt.cas.crawl.action.CrawlerAction;
+import org.apache.oodt.cas.crawl.action.CrawlerActionRepo;
+import org.apache.oodt.cas.crawl.status.IngestStatus;
+import org.apache.oodt.cas.crawl.structs.exceptions.CrawlerActionException;
+import org.apache.oodt.cas.filemgr.datatransfer.LocalDataTransferFactory;
+import org.apache.oodt.cas.filemgr.ingest.Ingester;
+import org.apache.oodt.cas.filemgr.metadata.CoreMetKeys;
+import org.apache.oodt.cas.filemgr.structs.exceptions.IngestException;
+import org.apache.oodt.cas.metadata.Metadata;
+
+//Spring imports
+import org.springframework.context.support.FileSystemXmlApplicationContext;
+
+//Google imports
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+//JUnit imports
+import junit.framework.TestCase;
+
+/**
+ * Test class for {@link ProductCrawler}.
+ *
+ * @author bfoster (Brian Foster)
+ */
+public class TestProductCrawler extends TestCase {
+
+   private static final String CRAWLER_CONFIG =
+      "src/main/resources/crawler-config.xml";
+
+   // Case1:
+   //  - Preconditions: fail
+   public void testHandleFileCase1() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailPreconditions();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.PRECONDS_FAILED, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertFalse(pc.ranExtraction());
+      assertFalse(pc.ranRenaming());
+      assertFalse(pc.ranRequiredMetadata());
+      assertFalse(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertFalse(pc.ranPostIngestFailActions());
+   }
+
+   // Case2:
+   //  - Preconditions: pass
+   //  - FailExtraction: fail
+   public void testHandleFileCase2() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailExtraction();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.FAILURE, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertFalse(pc.ranRenaming());
+      assertFalse(pc.ranRequiredMetadata());
+      assertFalse(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertTrue(pc.ranPostIngestFailActions());
+   }
+
+   // Case3:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: fail
+   public void testHandleFileCase3() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailRenaming();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.FAILURE, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertFalse(pc.ranRequiredMetadata());
+      assertFalse(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertTrue(pc.ranPostIngestFailActions());
+   }
+
+   // Case4:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: pass
+   //  - RequiredMetadata: fail
+   public void testHandleFileCase4() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailRequiredMetadata();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.FAILURE, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertTrue(pc.ranRequiredMetadata());
+      assertFalse(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertTrue(pc.ranPostIngestFailActions());
+   }
+
+   // Case5:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: pass
+   //  - RequiredMetadata: pass
+   //  - PreIngestActions: fail
+   public void testHandleFileCase5() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailPreIngestActions();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.FAILURE, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertTrue(pc.ranRequiredMetadata());
+      assertTrue(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertTrue(pc.ranPostIngestFailActions());
+   }
+
+   // Case6:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: pass
+   //  - RequiredMetadata: pass
+   //  - PreIngestActions: pass
+   //  - SkipIngest: true
+   public void testHandleFileCase6() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markSkipIngest();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.SKIPPED, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertTrue(pc.ranRequiredMetadata());
+      assertTrue(pc.ranPreIngestActions());
+      assertFalse(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertFalse(pc.ranPostIngestFailActions());
+   }
+
+   // Case7:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: pass
+   //  - RequiredMetadata: pass
+   //  - PreIngestActions: pass
+   //  - SkipIngest: false
+   //  - Ingest: fail
+   public void testHandleFileCase7() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+      pc.markFailIngest();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.FAILURE, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertTrue(pc.ranRequiredMetadata());
+      assertTrue(pc.ranPreIngestActions());
+      assertTrue(pc.ranIngest());
+      assertFalse(pc.ranPostIngestSuccessActions());
+      assertTrue(pc.ranPostIngestFailActions());
+   }
+
+   // Case8:
+   //  - Preconditions: pass
+   //  - FailExtraction: pass
+   //  - RenameProduct: pass
+   //  - RequiredMetadata: pass
+   //  - PreIngestActions: pass
+   //  - SkipIngest: false
+   //  - Ingest: pass
+   public void testHandleFileCase8() {
+      File p = new File("/tmp/data.dat");
+
+      // Setup Crawler.
+      StateAwareProductCrawler pc = new StateAwareProductCrawler();
+
+      // Run Crawler.
+      IngestStatus status = pc.handleFile(p);
+
+      // Verify IngestStatus.
+      assertEquals(IngestStatus.Result.SUCCESS, status.getResult());
+      assertEquals(p, status.getProduct());
+
+      // Verify correct methods were run. 
+      assertTrue(pc.ranPreconditions());
+      assertTrue(pc.ranExtraction());
+      assertTrue(pc.ranRenaming());
+      assertTrue(pc.ranRequiredMetadata());
+      assertTrue(pc.ranPreIngestActions());
+      assertTrue(pc.ranIngest());
+      assertTrue(pc.ranPostIngestSuccessActions());
+      assertFalse(pc.ranPostIngestFailActions());
+   }
+
+   public void testSetupIngester() {
+      ProductCrawler pc = createDummyCrawler();
+      pc.setClientTransferer(LocalDataTransferFactory.class.getCanonicalName());
+      pc.setupIngester();
+      assertNotNull(pc.ingester);
+   }
+
+   public void testLoadAndValidateActions() {
+      ProductCrawler pc = createDummyCrawler();
+      pc.setApplicationContext(new FileSystemXmlApplicationContext(
+            CRAWLER_CONFIG));
+      pc.loadAndValidateActions();
+      assertEquals(0, pc.actionRepo.getActions().size());
+
+      pc = createDummyCrawler();
+      pc.setApplicationContext(new FileSystemXmlApplicationContext(
+            CRAWLER_CONFIG));
+      pc.setActionIds(Lists.newArrayList("Unique", "DeleteDataFile"));
+      pc.loadAndValidateActions();
+      assertEquals(Sets.newHashSet(
+            pc.getApplicationContext().getBean("Unique"),
+            pc.getApplicationContext().getBean("DeleteDataFile")),
+            pc.actionRepo.getActions());
+   }
+
+   public void testValidateActions() throws CrawlerActionException {
+      // Test case invalid action.
+      ProductCrawler pc = createDummyCrawler();
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+
+      CrawlerAction action = createMock(CrawlerAction.class);
+      action.validate();
+      expectLastCall().andThrow(new CrawlerActionException());
+      expect(action.getId()).andReturn("ActionId");
+      replay(action);
+
+      expect(pc.actionRepo.getActions()).andReturn(
+            Sets.newHashSet(action));
+      replay(pc.actionRepo);
+      try {
+         pc.validateActions();
+         fail("Should have thrown RuntimeException");
+      } catch (RuntimeException e) { /* expect throw */ }
+      verify(pc.actionRepo);
+      verify(action);
+
+      // Test case valid action.
+      pc = createDummyCrawler();
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      action = createMock(CrawlerAction.class);
+      expect(pc.actionRepo.getActions()).andReturn(
+            Sets.newHashSet(action));
+      action.validate();
+      replay(pc.actionRepo);
+      replay(action);
+      pc.validateActions();
+      verify(pc.actionRepo);
+      verify(action);
+   }
+
+   public void testContainsRequiredMetadata() {
+      ProductCrawler pc = createDummyCrawler();
+      Metadata m = new Metadata();
+      m.replaceMetadata(CoreMetKeys.PRODUCT_TYPE, "GenericFile");
+      m.replaceMetadata(CoreMetKeys.FILENAME, "TestFile.txt");
+      m.replaceMetadata(CoreMetKeys.FILE_LOCATION, "/tmp/dir");
+      m.replaceMetadata(CoreMetKeys.FILE_SIZE, "0");
+      assertTrue(pc.containsRequiredMetadata(m));
+      assertFalse(pc.containsRequiredMetadata(new Metadata()));
+   }
+
+   public void testAddKnowMetadata() {
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+      ProductCrawler pc = createDummyCrawler();
+      pc.addKnownMetadata(p, m);
+      assertEquals(4, m.getAllKeys().size());
+      assertEquals(p.getName(), m.getMetadata(CoreMetKeys.PRODUCT_NAME));
+      assertEquals(p.getName(), m.getMetadata(CoreMetKeys.FILENAME));
+      assertEquals(p.getParentFile().getAbsolutePath(),
+            m.getMetadata(CoreMetKeys.FILE_LOCATION));
+      assertEquals(String.valueOf(p.length()), m.getMetadata(CoreMetKeys.FILE_SIZE));
+   }
+
+   public void testCreateIngestStatus() {
+      File p = new File("/tmp/data.dat");
+      IngestStatus.Result result = IngestStatus.Result.SUCCESS;
+      String message = "Ingest OK";
+      ProductCrawler pc = createDummyCrawler();
+      IngestStatus status = pc.createIngestStatus(p, result, message);
+      assertEquals(p, status.getProduct());
+      assertEquals(result, status.getResult());
+      assertEquals(message, status.getMessage());
+   }
+
+   public void testIngest() throws MalformedURLException, IngestException {
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+
+      // Test successful ingest.
+      ProductCrawler pc = createDummyCrawler();
+      pc.setFilemgrUrl("http://localhost:9000");
+      pc.ingester = createMock(Ingester.class);
+      expect(pc.ingester.ingest(new URL("http://localhost:9000"), p, m))
+         .andReturn("TestProductId");
+      replay(pc.ingester);
+      assertTrue(pc.ingest(p, m));
+      verify(pc.ingester);
+
+      // Test failed ingest.
+      pc = createDummyCrawler();
+      pc.setFilemgrUrl("http://localhost:9000");
+      pc.ingester = createMock(Ingester.class);
+      expect(pc.ingester.ingest(new URL("http://localhost:9000"), p, m))
+         .andThrow(new IngestException());
+      replay(pc.ingester);
+      assertFalse(pc.ingest(p, m));
+      verify(pc.ingester);
+   }
+
+   public void testPerformPreIngestActions() throws CrawlerActionException {
+      ProductCrawler pc = createDummyCrawler();
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+
+      // Test actionRepo == null.
+      assertTrue(pc.performPreIngestActions(p, m));
+
+      // Test actionRepo != null and performAction return true.
+      CrawlerAction action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(true);
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPreIngestActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertTrue(pc.performPreIngestActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+
+      // Test actionRepo != null and performAction return false.
+      action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(false);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPreIngestActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertFalse(pc.performPreIngestActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+   }
+
+   public void testPerformPostIngestOnSuccessActions() throws CrawlerActionException {
+      ProductCrawler pc = createDummyCrawler();
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+
+      // Test actionRepo == null.
+      assertTrue(pc.performPostIngestOnSuccessActions(p, m));
+
+      // Test actionRepo != null and performAction return true.
+      CrawlerAction action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(true);
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPostIngestOnSuccessActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertTrue(pc.performPostIngestOnSuccessActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+
+      // Test actionRepo != null and performAction return false.
+      action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(false);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPostIngestOnSuccessActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertFalse(pc.performPostIngestOnSuccessActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+   }
+
+   public void testPerformPostIngestOnFailActions() throws CrawlerActionException {
+      ProductCrawler pc = createDummyCrawler();
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+
+      // Test actionRepo == null.
+      assertTrue(pc.performPostIngestOnFailActions(p, m));
+
+      // Test actionRepo != null and performAction return true.
+      CrawlerAction action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(true);
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPostIngestOnFailActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertTrue(pc.performPostIngestOnFailActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+
+      // Test actionRepo != null and performAction return false.
+      action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(false);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      replay(action);
+
+      pc.actionRepo = createMock(CrawlerActionRepo.class);
+      expect(pc.actionRepo.getPostIngestOnFailActions())
+         .andReturn(Lists.newArrayList(action));
+      replay(pc.actionRepo);
+
+      assertFalse(pc.performPostIngestOnFailActions(p, m));
+      verify(action);
+      verify(pc.actionRepo);
+   }
+
+   public void testPerformProductCrawlerActions() throws CrawlerActionException {
+      ProductCrawler pc = createDummyCrawler();
+      File p = new File("/tmp/data.dat");
+      Metadata m = new Metadata();
+
+      // Test no actions.
+      assertTrue(pc.performProductCrawlerActions(
+            Collections.<CrawlerAction>emptyList(), p, m));
+
+      // Test 1 action pass.
+      CrawlerAction action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(true);
+      replay(action);
+      assertTrue(pc.performProductCrawlerActions(
+            Lists.newArrayList(action), p, m));
+      verify(action);
+
+      // Test 1 action fail.
+      action = createMock(CrawlerAction.class);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      expect(action.performAction(p, m)).andReturn(false);
+      expect(action.getId()).andReturn("ActionId");
+      expect(action.getDescription()).andReturn("Action Description");
+      replay(action);
+      assertFalse(pc.performProductCrawlerActions(
+            Lists.newArrayList(action), p, m));
+      verify(action);
+
+      // Test 1 action pass and 1 action fail.
+      CrawlerAction passAction = createMock(CrawlerAction.class);
+      expect(passAction.getId()).andReturn("ActionId");
+      expect(passAction.getDescription()).andReturn("Action Description");
+      expect(passAction.performAction(p, m)).andReturn(true);
+      replay(passAction);
+      CrawlerAction failAction = createMock(CrawlerAction.class);
+      expect(failAction.getId()).andReturn("ActionId");
+      expect(failAction.getDescription()).andReturn("Action Description");
+      expect(failAction.performAction(p, m)).andReturn(false);
+      expect(failAction.getId()).andReturn("ActionId");
+      expect(failAction.getDescription()).andReturn("Action Description");
+      replay(failAction);
+      assertFalse(pc.performProductCrawlerActions(
+            Lists.newArrayList(passAction, failAction), p, m));
+      verify(passAction);
+      verify(failAction);
+   }
+
+   private static ProductCrawler createDummyCrawler() {
+      return createDummyCrawler(true, new Metadata(), null); 
+   }
+
+   private static ProductCrawler createDummyCrawler(
+         final boolean passesPreconditions, final Metadata productMetadata,
+         final File renamedFile) {
+      return new ProductCrawler() {
+         @Override
+         protected boolean passesPreconditions(File product) {
+            return passesPreconditions;
+         }
+         @Override
+         protected Metadata getMetadataForProduct(File product) {
+            return productMetadata;
+         }
+         @Override
+         protected File renameProduct(File product, Metadata productMetadata)
+               throws Exception {
+            return renamedFile == null ? product : renamedFile;
+         }
+      };
+   }    
+}

Added: oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/TestCrawlerBeansPropHandler.java
URL: http://svn.apache.org/viewvc/oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/TestCrawlerBeansPropHandler.java?rev=1648407&view=auto
==============================================================================
--- oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/TestCrawlerBeansPropHandler.java (added)
+++ oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/cli/option/handler/TestCrawlerBeansPropHandler.java Mon Dec 29 18:46:41 2014
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oodt.cas.crawl.cli.option.handler;
+
+//OODT static imports
+import static 
org.apache.oodt.cas.crawl.util.ActionBeanProperties.getProperties;
+
+//JDK imports
+import java.util.Collections;
+
+//OODT imports
+import org.apache.oodt.cas.cli.option.AdvancedCmdLineOption;
+import org.apache.oodt.cas.cli.option.CmdLineOptionInstance;
+
+//Google imports
+import com.google.common.collect.Lists;
+
+//JUnit imports
+import junit.framework.TestCase;
+
+/**
+ * Test class for {@link CrawlerBeansPropHandler}.
+ *
+ * @author bfoster (Brian Foster)
+ */
+public class TestCrawlerBeansPropHandler extends TestCase {
+
+   public void testHandleOption() {
+      AdvancedCmdLineOption option = new AdvancedCmdLineOption();
+      option.setShortOption("t");
+      option.setLongOption("test");
+      option.setHasArgs(true);
+
+      // Test 1 value specified.
+      CmdLineOptionInstance instance = new CmdLineOptionInstance(
+            option, Lists.newArrayList("value"));
+      CrawlerBeansPropHandler handler = new CrawlerBeansPropHandler();
+      handler.initialize(option);
+      handler.setProperties(Lists.newArrayList("TestBean.prop"));
+      handler.handleOption(null, instance);
+      assertEquals(1, getProperties().size());
+      assertEquals("value", getProperties().getProperty("TestBean.prop"));
+      getProperties().clear();
+
+      // Test multiple values specified.
+      instance = new CmdLineOptionInstance(
+            option, Lists.newArrayList("value1", "value2"));
+      handler = new CrawlerBeansPropHandler();
+      handler.initialize(option);
+      handler.setProperties(Lists.newArrayList("TestBean.prop"));
+      handler.handleOption(null, instance);
+      assertEquals(2, getProperties().size());
+      assertEquals("value1", getProperties().getProperty("TestBean.prop[0]"));
+      assertEquals("value2", getProperties().getProperty("TestBean.prop[1]"));
+      getProperties().clear();
+
+      // Test no values specified.
+      try {
+         instance = new CmdLineOptionInstance(
+               option, Collections.<String>emptyList());
+         handler = new CrawlerBeansPropHandler();
+         handler.initialize(option);
+         handler.setProperties(Lists.newArrayList("TestBean.prop"));
+         handler.handleOption(null, instance);
+         fail("Should have thrown RuntimeException");
+      } catch (RuntimeException e) { /* expect throw */ }
+   }
+}

Added: 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/TestBooleanOptions.java
URL: 
http://svn.apache.org/viewvc/oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/TestBooleanOptions.java?rev=1648407&view=auto
==============================================================================
--- 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/TestBooleanOptions.java
 (added)
+++ 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/option/TestBooleanOptions.java
 Mon Dec 29 18:46:41 2014
@@ -0,0 +1,60 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more 
contributor
+// license agreements.  See the NOTICE.txt file distributed with this work for
+// additional information regarding copyright ownership.  The ASF licenses this
+// file to you under the Apache License, Version 2.0 (the "License"); you may 
not
+// use this file except in compliance with the License.  You may obtain a copy 
of
+// the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
+// License for the specific language governing permissions and limitations 
under
+// the License.
+
+package org.apache.oodt.cas.crawl.option;
+
+//Junit imports
+import junit.framework.TestCase;
+
+//OODT imports
+import org.apache.oodt.cas.crawl.MetExtractorProductCrawler;
+import org.apache.oodt.cas.crawl.ProductCrawler; // for javadoc
+
+/**
+ * @author mattmann
+ * @version $Revision$
+ * @since OODT-241
+ * 
+ * <p>
+ * Class ensures that boolean options such as --noRecur and --crawlForDirs are
+ * settable in {@link ProductCrawler} derivatives
+ * </p>.
+ */
+public final class TestBooleanOptions extends TestCase {
+
+    public void testSetBooleanOptions() {
+        MetExtractorProductCrawler crawler = new MetExtractorProductCrawler();
+        try {
+            crawler.getClass().getMethod("setNoRecur",
+                    new Class[] { boolean.class }).invoke(crawler,
+                    new Object[] { new Boolean(true) });
+        } catch (Exception e) {
+            fail(e.getMessage());
+        }
+
+        try {
+            crawler.getClass().getMethod("setCrawlForDirs",
+                    new Class[] { boolean.class }).invoke(crawler,
+                    new Object[] { new Boolean(true) });
+        } catch (Exception e) {
+            fail(e.getMessage());
+        }
+
+        assertTrue(crawler.isNoRecur());
+        assertTrue(crawler.isCrawlForDirs());
+
+    }
+
+}

Added: 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/TestMimeExtractorConfigReader.java
URL: 
http://svn.apache.org/viewvc/oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/TestMimeExtractorConfigReader.java?rev=1648407&view=auto
==============================================================================
--- 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/TestMimeExtractorConfigReader.java
 (added)
+++ 
oodt/trunk/crawler/src/test/java/org/apache/oodt/cas/crawl/typedetection/TestMimeExtractorConfigReader.java
 Mon Dec 29 18:46:41 2014
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oodt.cas.crawl.typedetection;
+
+//JDK imports
+import java.io.File;
+import java.util.List;
+import java.util.UUID;
+
+//Apache imports
+import org.apache.commons.io.FileUtils;
+
+//OODT imports
+import org.apache.oodt.cas.metadata.extractors.CopyAndRewriteExtractor;
+import org.apache.oodt.cas.metadata.extractors.MetReaderExtractor;
+import org.apache.oodt.cas.metadata.filenaming.PathUtilsNamingConvention;
+
+//Google imports
+import com.google.common.collect.Lists;
+
+//JUnit imports
+import junit.framework.TestCase;
+
+/**
+ * Test class for {@link MimeExtractorConfigReader}.
+ *
+ * @author bfoster (Brian Foster)
+ */
+public class TestMimeExtractorConfigReader extends TestCase {
+
+   private File mimeTypesFile;
+   private File defaultExtractorConfig;
+   private File tmpDir;
+
+   @Override
+   public void setUp() throws Exception {
+      File tmpFile = File.createTempFile("bogus", "bogus");
+      tmpDir = new File(tmpFile.getParentFile(), UUID.randomUUID().toString());
+      tmpFile.delete();
+      if (!tmpDir.mkdirs()) {
+         throw new Exception("Failed to create temp directory");
+      }
+      mimeTypesFile = new File(tmpDir, "mime-types.xml");
+      FileUtils.touch(mimeTypesFile);
+      defaultExtractorConfig = new File(tmpDir, 
"default-extractor.properties");
+      FileUtils.touch(defaultExtractorConfig);
+   }
+
+   @Override
+   public void tearDown() throws Exception {
+      FileUtils.forceDelete(tmpDir);
+   }
+
+   public void testReadWithDefaults() throws Exception {
+      String namingConvId = "PathUtilsNC";
+      String defaultPreconditionId = "TestPrecondition";
+      String preconditionId1 = "Precondition1";
+      String preconditionId2 = "Precondition2";
+      String xmlFileContents =
+           "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+         + "<cas:mimetypemap xmlns:cas=\"http://oodt.jpl.nassa.gov/1.0/cas\"";
+               + " magic=\"false\" mimeRepo=\""
+               + mimeTypesFile.getAbsolutePath() + "\">\n"
+         + "<default>\n"
+         + "   <namingConvention id=\"" + namingConvId + "\" />\n"
+         + "   <extractor class=\""
+               + CopyAndRewriteExtractor.class.getCanonicalName() + "\">\n"
+         + "      <config file=\"" + defaultExtractorConfig.getAbsolutePath()
+                     + "\"/>\n"
+         + "      <preCondComparators>\n"
+         + "         <preCondComparator id=\"" + defaultPreconditionId + 
"\"/>\n"
+         + "      </preCondComparators>\n"
+         + "   </extractor>\n"
+         + "</default>\n"
+         + "<mime type=\"some/mime-type\">\n"
+         + "   <extractor class=\""
+               + MetReaderExtractor.class.getCanonicalName() + "\">\n"
+         + "      <config file=\"" + defaultExtractorConfig.getAbsolutePath()
+                     + "\"/>\n"
+         + "      <preCondComparators>\n"
+         + "         <preCondComparator id=\"" + preconditionId1 + "\"/>\n"
+         + "         <preCondComparator id=\"" + preconditionId2 + "\"/>\n"
+         + "      </preCondComparators>\n"
+         + "   </extractor>\n"
+         + "</mime>\n"
+         + "</cas:mimetypemap>\n";
+      File xmlMimeRepo = new File(tmpDir, "mime-repo.xml");
+      FileUtils.writeStringToFile(xmlMimeRepo, xmlFileContents, "UTF-8");
+      assertTrue(xmlMimeRepo.exists());
+      MimeExtractorRepo mimeRepo = MimeExtractorConfigReader.read(
+            xmlMimeRepo.getAbsolutePath());
+      assertEquals(namingConvId, 
mimeRepo.getNamingConventionId("some/mime-type"));
+      List<MetExtractorSpec> specs = 
mimeRepo.getExtractorSpecsForMimeType("some/mime-type");
+      assertEquals(1, specs.size());
+      assertEquals(MetReaderExtractor.class,
+            specs.get(0).getMetExtractor().getClass());
+      assertEquals(Lists.newArrayList(preconditionId1, preconditionId2),
+            specs.get(0).getPreCondComparatorIds());
+      specs = mimeRepo.getExtractorSpecsForMimeType("someother/mime-type");
+      assertEquals(1, specs.size());
+      assertEquals(CopyAndRewriteExtractor.class,
+            specs.get(0).getMetExtractor().getClass());
+      assertEquals(Lists.newArrayList(defaultPreconditionId),
+            specs.get(0).getPreCondComparatorIds());
+   }
+
+   public void testReadWithoutDefaults() throws Exception {
+      String namingConvId = "PathUtilsNC";
+      String preconditionId1 = "Precondition1";
+      String preconditionId2 = "Precondition2";
+      String xmlFileContents =
+           "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
+         + "<cas:mimetypemap xmlns:cas=\"http://oodt.jpl.nassa.gov/1.0/cas\"";
+               + " magic=\"false\" mimeRepo=\""
+               + mimeTypesFile.getAbsolutePath() + "\">\n"
+         + "<mime type=\"some/mime-type\">\n"
+         + "   <namingConvention id=\"" + namingConvId + "\" />\n"
+         + "   <extractor class=\""
+               + MetReaderExtractor.class.getCanonicalName() + "\">\n"
+         + "      <config file=\"" + defaultExtractorConfig.getAbsolutePath()
+                     + "\"/>\n"
+         + "      <preCondComparators>\n"
+         + "         <preCondComparator id=\"" + preconditionId1 + "\"/>\n"
+         + "         <preCondComparator id=\"" + preconditionId2 + "\"/>\n"
+         + "      </preCondComparators>\n"
+         + "   </extractor>\n"
+         + "</mime>\n"
+         + "</cas:mimetypemap>\n";
+      File xmlMimeRepo = new File(tmpDir, "mime-repo.xml");
+      FileUtils.writeStringToFile(xmlMimeRepo, xmlFileContents, "UTF-8");
+      assertTrue(xmlMimeRepo.exists());
+      MimeExtractorRepo mimeRepo = MimeExtractorConfigReader.read(
+            xmlMimeRepo.getAbsolutePath());
+      assertEquals(namingConvId, 
mimeRepo.getNamingConventionId("some/mime-type"));
+      List<MetExtractorSpec> specs = 
mimeRepo.getExtractorSpecsForMimeType("some/mime-type");
+      assertEquals(1, specs.size());
+      assertEquals(MetReaderExtractor.class,
+            specs.get(0).getMetExtractor().getClass());
+      assertEquals(Lists.newArrayList(preconditionId1, preconditionId2),
+            specs.get(0).getPreCondComparatorIds());
+      specs = mimeRepo.getExtractorSpecsForMimeType("someother/mime-type");
+      assertEquals(0, specs.size());
+   }
+}


Reply via email to