[ 
https://issues.apache.org/jira/browse/AMBARI-24761?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16650206#comment-16650206
 ] 

ASF GitHub Bot commented on AMBARI-24761:
-----------------------------------------

kasakrisz closed pull request #6: AMBARI-24761 - Infra Manager: hive support 
for archiving Infra Solr
URL: https://github.com/apache/ambari-infra/pull/6
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff of a foreign (forked) pull request on
merge, it is displayed below for the sake of provenance:

diff --git 
a/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story 
b/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
index 876019f..729d609 100644
--- a/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
+++ b/ambari-infra-manager-it/src/test/resources/stories/infra_api_tests.story
@@ -24,11 +24,11 @@ And solr contains 10 documents between 
2010-01-01T05:00:00.000Z and 2010-01-04T0
 Scenario: Archiving job fails when part of the data is exported. After 
resolving the issue and restarting the job exports the rest of the data.
 
 Given 200 documents in solr with logtime from 2011-10-09T05:00:00.000Z to 
2011-10-09T20:00:00.000Z
-And a file on s3 with key 
solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz
+And a file on s3 with key 
solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2
 When start archive_audit_logs job with parameters 
writeBlockSize=20,start=2010-11-09T00:00:00.000Z,end=2011-10-11T00:00:00.000Z 
after 2 seconds
 Then Check 3 files exists on s3 server with filenames containing the text 
solr_archive_audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 
2011-10-09T07:59:59.999Z after 5 seconds
-When delete file with key 
solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.tar.gz from s3
+When delete file with key 
solr_archive_audit_logs_-_2011-10-09T08-00-00.000Z.json.bz2 from s3
 And restart archive_audit_logs job within 2 seconds
 Then Check 10 files exists on s3 server with filenames containing the text 
solr_archive_audit_logs_-_2011-10-09 after 20 seconds
 And solr does not contain documents between 2011-10-09T05:00:00.000Z and 
2011-10-09T20:00:00.000Z after 5 seconds
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
new file mode 100644
index 0000000..ac1ca6b
--- /dev/null
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/BZip2Compressor.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.infra.job.archive;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+
+import 
org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
+import org.apache.commons.io.IOUtils;
+
+public class BZip2Compressor extends AbstractFileAction {
+  @Override
+  protected File onPerform(File inputFile) {
+    File bz2File = new File(inputFile.getParent(), inputFile.getName() + 
".bz2");
+    try (BZip2CompressorOutputStream bZip2CompressorOutputStream = new 
BZip2CompressorOutputStream(new FileOutputStream(bz2File))) {
+      try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
+        IOUtils.copy(fileInputStream, bZip2CompressorOutputStream);
+      }
+    }
+    catch (IOException ex) {
+      throw new UncheckedIOException(ex);
+    }
+    return bz2File;
+  }
+}
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
index 5ff9587..acf19c0 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/Document.java
@@ -18,36 +18,37 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
+import static java.util.Collections.unmodifiableMap;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import static java.util.Collections.unmodifiableMap;
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
 
 public class Document {
-  private final Map<String, String> fieldMap;
+  private final Map<String, Object> fieldMap;
 
   private Document() {
     fieldMap = new HashMap<>();
   }
 
-  public Document(Map<String, String> fieldMap) {
+  public Document(Map<String, Object> fieldMap) {
     this.fieldMap = unmodifiableMap(fieldMap);
   }
 
-  public String get(String key) {
-    return fieldMap.get(key);
+  public String getString(String key) {
+    Object value = fieldMap.get(key);
+    return value == null ? null : value.toString();
   }
 
   @JsonAnyGetter
-  public Map<String, String> getFieldMap() {
+  public Map<String, Object> getFieldMap() {
     return fieldMap;
   }
 
   @JsonAnySetter
-  private void put(String key, String value) {
+  private void put(String key, Object value) {
     fieldMap.put(key, value);
   }
 }
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
index eac31af..d9d40b1 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/DocumentArchivingConfiguration.java
@@ -94,7 +94,7 @@ public DocumentExporter documentExporter(DocumentItemReader 
documentItemReader,
                                            PasswordStore passwordStore) {
 
     File baseDir = new File(infraManagerDataConfig.getDataFolder(), 
"exporting");
-    CompositeFileAction fileAction = new CompositeFileAction(new 
TarGzCompressor());
+    CompositeFileAction fileAction = new CompositeFileAction(new 
BZip2Compressor());
     switch (parameters.getDestination()) {
       case S3:
         fileAction.add(new S3Uploader(
@@ -163,8 +163,8 @@ public DocumentItemReader reader(ObjectSource<Document> 
documentSource,
 
   @Bean
   @StepScope
-  public ObjectSource<Document> 
logSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + 
PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
-                                          SolrDAO solrDAO) {
+  public ObjectSource<Document> 
documentSource(@Value("#{stepExecution.jobExecution.executionContext.get('" + 
PARAMETERS_CONTEXT_KEY + "')}") ArchivingParameters parameters,
+                                               SolrDAO solrDAO) {
 
     return new SolrDocumentSource(solrDAO, parameters.getStart(), 
computeEnd(parameters.getEnd(), parameters.getTtl()));
   }
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
index 0c879bd..b15d8b7 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatter.java
@@ -45,10 +45,10 @@ public FileNameSuffixFormatter(String columnName, String 
dateTimeFormat) {
   public String format(Document document) {
     requireNonNull(document, "Can not format file name suffix: input document 
is null!");
 
-    if (isBlank(document.get(columnName)))
+    if (isBlank(document.getString(columnName)))
       throw new IllegalArgumentException("The specified document does not have 
a column " + columnName + " or it's value is blank!");
 
-    return format(document.get(columnName));
+    return format(document.getString(columnName));
   }
 
   public String format(String value) {
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
index f8d8382..d505934 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrDocumentIterator.java
@@ -18,11 +18,6 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.ambari.infra.job.CloseableIterator;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-
 import java.io.IOException;
 import java.io.UncheckedIOException;
 import java.text.DateFormat;
@@ -32,6 +27,11 @@
 import java.util.Iterator;
 import java.util.TimeZone;
 
+import org.apache.ambari.infra.job.CloseableIterator;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+
 public class SolrDocumentIterator implements CloseableIterator<Document> {
 
   public static final String SOLR_DATE_FORMAT_TEXT = 
"yyyy-MM-dd'T'HH:mm:ss.SSSX";
@@ -56,21 +56,24 @@ public Document next() {
       return null;
     
     SolrDocument document = documentIterator.next();
-    HashMap<String, String> fieldMap = new HashMap<>();
+    HashMap<String, Object> fieldMap = new HashMap<>();
     for (String key : document.getFieldNames()) {
-      fieldMap.put(key, toString(document.get(key)));
+      fieldMap.put(key, convertFieldValue(document.get(key)));
     }
 
     return new Document(fieldMap);
   }
 
-  private String toString(Object value) {
+  private Object convertFieldValue(Object value) {
     if (value == null) {
       return null;
     }
     else if (value instanceof Date) {
       return SOLR_DATE_FORMAT.format(value);
     }
+    else if (value instanceof Integer || value instanceof Long) {
+      return value;
+    }
     else {
       return value.toString();
     }
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
index 9770982..87caeb8 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrParametrizedString.java
@@ -18,14 +18,14 @@
  */
 package org.apache.ambari.infra.job.archive;
 
-import org.apache.solr.client.solrj.util.ClientUtils;
-
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.solr.client.solrj.util.ClientUtils;
+
 public class SolrParametrizedString {
   private static final String PARAMETER_PATTERN = "\\$\\{%s[a-z0-9A-Z]+}";
   private static final Pattern NO_PREFIX_PARAMETER_PATTERN = 
Pattern.compile(String.format(PARAMETER_PATTERN, ""));
@@ -49,16 +49,16 @@ public String toString() {
     return string;
   }
 
-  public SolrParametrizedString set(Map<String, String> parameterMap) {
+  public SolrParametrizedString set(Map<String, Object> parameterMap) {
     return set(NO_PREFIX_PARAMETER_PATTERN, null, parameterMap);
   }
 
-  public SolrParametrizedString set(String prefix, Map<String, String> 
parameterMap) {
+  public SolrParametrizedString set(String prefix, Map<String, Object> 
parameterMap) {
     String dottedPrefix = prefix + ".";
     return set(Pattern.compile(String.format(PARAMETER_PATTERN, 
dottedPrefix)), dottedPrefix, parameterMap);
   }
 
-  private SolrParametrizedString set(Pattern regExPattern, String prefix, 
Map<String, String> parameterMap) {
+  private SolrParametrizedString set(Pattern regExPattern, String prefix, 
Map<String, Object> parameterMap) {
     String newString = string;
     for (String paramName : collectParamNames(regExPattern)) {
       String paramSuffix = prefix == null ? paramName : 
paramName.replace(prefix, "");
@@ -68,10 +68,13 @@ private SolrParametrizedString set(Pattern regExPattern, 
String prefix, Map<Stri
     return new SolrParametrizedString(newString);
   }
 
-  private String getValue(Map<String, String> parameterMap, String 
paramSuffix) {
-    String value = parameterMap.get(paramSuffix);
-    if ("*".equals(value))
-      return value;
-    return ClientUtils.escapeQueryChars(value);
+  private String getValue(Map<String, Object> parameterMap, String 
paramSuffix) {
+    Object value = parameterMap.get(paramSuffix);
+    if (value == null)
+      throw new NullPointerException(String.format("Value can not be null 
parameterMap[%s]", paramSuffix));
+    String stringValue = value.toString();
+    if ("*".equals(stringValue))
+      return stringValue;
+    return ClientUtils.escapeQueryChars(stringValue);
   }
 }
diff --git 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
index 40771dc..f0563f9 100644
--- 
a/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
+++ 
b/ambari-infra-manager/src/main/java/org/apache/ambari/infra/job/archive/SolrQueryBuilder.java
@@ -47,7 +47,7 @@ public static String computeEnd(String end, OffsetDateTime 
now, Duration ttl) {
   private static final String INTERVAL_START = "start";
   private static final String INTERVAL_END = "end";
   private String queryText;
-  private final Map<String, String> interval;
+  private final Map<String, Object> interval;
   private String filterQueryText;
   private Document document;
   private String[] sortFields;
diff --git a/ambari-infra-manager/src/main/resources/infra-manager.properties 
b/ambari-infra-manager/src/main/resources/infra-manager.properties
index 6830b81..d7bdc29 100644
--- a/ambari-infra-manager/src/main/resources/infra-manager.properties
+++ b/ambari-infra-manager/src/main/resources/infra-manager.properties
@@ -21,6 +21,7 @@ management.security.enabled=false
 management.health.solr.enabled=false
 infra-manager.server.data.folder=/tmp/ambariInfraManager
 
+# Archive Service Logs
 infra-manager.jobs.solr_data_archiving.archive_service_logs.enabled=true
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.collection=hadoop_logs
@@ -28,7 +29,7 @@ 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.query_text=logt
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.filter_query_text=(logtime:${logtime}
 AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[0]=logtime
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime}
 TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
+#infra-manager.jobs.solr_data_archiving.archive_service_logs.solr.delete_query_text=logtime:[${start.logtime}
 TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_service_logs.read_block_size=100
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.write_block_size=150
 infra-manager.jobs.solr_data_archiving.archive_service_logs.destination=LOCAL
@@ -38,41 +39,50 @@ 
infra-manager.jobs.solr_data_archiving.archive_service_logs.file_name_suffix_dat
 infra-manager.jobs.solr_data_archiving.archive_service_logs.ttl=PT24H
 
infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.enabled=false
 infra-manager.jobs.solr_data_archiving.archive_service_logs.scheduling.cron=0 
* * * * ?
+# Archive Audit Logs
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.enabled=true
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.collection=audit_logs
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=logtime:[${start}
 TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(logtime:${logtime}
 AND id:{${id} TO *]) OR logtime:{${logtime} TO ${end}]
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.query_text=evtTime:[${start}
 TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.filter_query_text=(evtTime:${evtTime}
 AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[0]=evtTime
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.sort_column[1]=id
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=logtime:[${start.logtime}
 TO ${end.logtime}} OR (logtime:${end.logtime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime}
 TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.read_block_size=100
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.write_block_size=150
 infra-manager.jobs.solr_data_archiving.archive_audit_logs.destination=S3
 # TODO: logtime may not be enough: The same filename can be generated when 
more than write_block_size count docs has the same logtime value
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=logtime
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_column=evtTime
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_endpoint=hdfs://namenode:9000/
-infra-manager.jobs.solr_data_archiving.archive_audit_logs.hdfs_destination_directory=/test_audit_logs
+infra-manager.jobs.solr_data_archiving.archive_audit_logs.local_destination_directory=/tmp/ambariInfraManager
 
#infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_access_file=<any>.csv
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_key_prefix=solr_archive_
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_bucket_name=testbucket
 
infra-manager.jobs.solr_data_archiving.archive_audit_logs.s3_endpoint=http://fakes3:4569
-# TODO: configure ranger audit logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.zoo_keeper_connection_string=zookeeper:2181
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.read_block_size=100
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.write_block_size=150
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.file_name_suffix_column=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.destination_directory_path=/tmp/ambariInfraManager
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.collection=hadoop_logs
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.query_text=logtime:[*
 TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.filter_query_text=(logtime:"${logtime}"
 AND id:{"${id}" TO *]) OR logtime:{"${logtime}" TO "${end}"]
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[0]=logtime
-#infra-manager.jobs.solr_data_archiving.export_ranger_audit_logs.query.sort_column[1]=id
+# Archive Ranger Audit Logs
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.enabled=true
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.zoo_keeper_connection_string=zookeeper:2181
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.collection=ranger_audits
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.query_text=evtTime:[${start}
 TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.filter_query_text=(evtTime:${evtTime}
 AND id:{${id} TO *]) OR evtTime:{${evtTime} TO ${end}]
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.sort_column[0]=evtTime
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.sort_column[1]=id
+#infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.solr.delete_query_text=evtTime:[${start.evtTime}
 TO ${end.evtTime}} OR (evtTime:${end.evtTime} AND id:[* TO ${end.id}])
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.read_block_size=10000
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.write_block_size=10000
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.destination=LOCAL
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.local_destination_directory=/tmp/ambariInfraManager
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.file_name_suffix_column=evtTime
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.file_name_suffix_date_format=yyyy-MM-dd'T'HH-mm-ss.SSSX
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.ttl=PT24H
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.scheduling.enabled=false
+infra-manager.jobs.solr_data_archiving.archive_ranger_audit_logs.scheduling.cron=0
 * * * * ?
+
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.enabled=true
 
infra-manager.jobs.solr_data_deleting.delete_audit_logs.zoo_keeper_connection_string=zookeeper:2181
 infra-manager.jobs.solr_data_deleting.delete_audit_logs.collection=audit_logs
-infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=logtime
+infra-manager.jobs.solr_data_deleting.delete_audit_logs.filter_field=evtTime
 infra-manager.jobs.clean-up.ttl=PT24H
 infra-manager.jobs.clean-up.scheduling.enabled=true
 infra-manager.jobs.clean-up.scheduling.cron=0 * * * * ?
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
index b31110c..d2e7b04 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentExporterTest.java
@@ -19,6 +19,15 @@
 
 package org.apache.ambari.infra.job.archive;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.HashMap;
+
 import org.apache.ambari.infra.job.JobContextRepository;
 import org.easymock.EasyMockRunner;
 import org.easymock.EasyMockSupport;
@@ -36,24 +45,15 @@
 import org.springframework.batch.item.ItemStreamReader;
 import org.springframework.batch.repeat.RepeatStatus;
 
-import java.io.IOException;
-import java.io.UncheckedIOException;
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-
 @RunWith(EasyMockRunner.class)
 public class DocumentExporterTest extends EasyMockSupport {
 
   private static final long JOB_EXECUTION_ID = 1L;
   private static final long STEP_EXECUTION_ID = 1L;
-  private static final Document DOCUMENT_2 = new Document(new HashMap<String, 
String>() {{
+  private static final Document DOCUMENT_2 = new Document(new HashMap<String, 
Object>() {{
     put("id", "2");
   }});
-  private static final Document DOCUMENT_3 = new Document(new HashMap<String, 
String>() {{
+  private static final Document DOCUMENT_3 = new Document(new HashMap<String, 
Object>() {{
     put("id", "3");
   }});
   private DocumentExporter documentExporter;
@@ -70,9 +70,8 @@
   @Mock
   private JobContextRepository jobContextRepository;
 
-//  private ExecutionContext executionContext;
   private ChunkContext chunkContext;
-  private static final Document DOCUMENT = new Document(new HashMap<String, 
String>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, 
Object>() {{ put("id", "1"); }});
 
   @Before
   public void setUp() throws Exception {
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
index 0776c3c..9d2b21a 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/DocumentItemReaderTest.java
@@ -19,6 +19,14 @@
 
 package org.apache.ambari.infra.job.archive;
 
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNull.nullValue;
+import static org.junit.Assert.assertThat;
+
+import java.util.HashMap;
+
 import org.apache.ambari.infra.job.CloseableIterator;
 import org.apache.ambari.infra.job.ObjectSource;
 import org.easymock.EasyMockRunner;
@@ -30,19 +38,11 @@
 import org.junit.runner.RunWith;
 import org.springframework.batch.item.ExecutionContext;
 
-import java.util.HashMap;
-
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.hamcrest.core.Is.is;
-import static org.hamcrest.core.IsNull.nullValue;
-import static org.junit.Assert.assertThat;
-
 @RunWith(EasyMockRunner.class)
 public class DocumentItemReaderTest extends EasyMockSupport {
-  private static final Document DOCUMENT = new Document(new HashMap<String, 
String>() {{ put("id", "1"); }});
-  private static final Document DOCUMENT_2 = new Document(new HashMap<String, 
String>() {{ put("id", "2"); }});
-  private static final Document DOCUMENT_3 = new Document(new HashMap<String, 
String>() {{ put("id", "3"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, 
Object>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT_2 = new Document(new HashMap<String, 
Object>() {{ put("id", "2"); }});
+  private static final Document DOCUMENT_3 = new Document(new HashMap<String, 
Object>() {{ put("id", "3"); }});
   private static final int READ_BLOCK_SIZE = 2;
 
   private DocumentItemReader documentItemReader;
@@ -117,7 +117,7 @@ public void 
testReadWhenCollectionContainsExactlySameCountElementsAsReadBlockSiz
 
   @Test
   public void testReadWhenCollectionContainsMoreElementsThanReadBlockSize() 
throws Exception {
-    Document document3 = new Document(new HashMap<String, String>() {{ 
put("id", "2"); }});
+    Document document3 = new Document(new HashMap<String, Object>() {{ 
put("id", "2"); }});
 
     expect(documentSource.open(null, 2)).andReturn(documentIterator);
     expect(documentSource.open(DOCUMENT_2, 2)).andReturn(documentIterator2);
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
index cca2c1a..1fa8434 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/FileNameSuffixFormatterTest.java
@@ -1,11 +1,11 @@
 package org.apache.ambari.infra.job.archive;
 
-import org.junit.Test;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
 
 import java.util.HashMap;
 
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -42,17 +42,17 @@ public void 
testFormatWhenSpecifiedColumnDoesNotExistsInTheDocumentThrowingExcep
 
   @Test(expected = IllegalArgumentException.class)
   public void 
testFormatWhenSpecifiedColumnContainsBlankValueThrowingException() throws 
Exception {
-    formatter.format(new Document(new HashMap<String, String>() {{ 
put("logtime", "  "); }}));
+    formatter.format(new Document(new HashMap<String, Object>() {{ 
put("logtime", "  "); }}));
   }
 
   @Test
   public void testFormatWhenNoDateFormatSpecifiedRawColumnValueReturned() 
throws Exception {
     FileNameSuffixFormatter formatter = new FileNameSuffixFormatter("logtime", 
null);
-    assertThat(formatter.format(new Document(new HashMap<String, String>() {{ 
put("logtime", "Monday"); }})), is("Monday"));
+    assertThat(formatter.format(new Document(new HashMap<String, Object>() {{ 
put("logtime", "Monday"); }})), is("Monday"));
   }
 
   @Test
   public void testFormatWhenDateFormatIsSpecifiedAFormattedValueReturned() 
throws Exception {
-    assertThat(formatter.format(new Document(new HashMap<String, String>() {{ 
put("logtime", "2017-12-15T10:12:33.453Z"); }})), 
is("2017-12-15T10-12-33-453Z"));
+    assertThat(formatter.format(new Document(new HashMap<String, Object>() {{ 
put("logtime", "2017-12-15T10:12:33.453Z"); }})), 
is("2017-12-15T10-12-33-453Z"));
   }
 }
\ No newline at end of file
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
index 85e79e1..af8b86d 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/LocalDocumentItemWriterTest.java
@@ -19,7 +19,19 @@
 
 package org.apache.ambari.infra.job.archive;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
+import static org.easymock.EasyMock.cmp;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.LogicalOperator.EQUAL;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+
 import org.apache.commons.io.FileUtils;
 import org.easymock.EasyMockRunner;
 import org.easymock.EasyMockSupport;
@@ -29,25 +41,14 @@
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.easymock.EasyMock.cmp;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.easymock.LogicalOperator.EQUAL;
-import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import com.fasterxml.jackson.databind.ObjectMapper;
 
 @RunWith(EasyMockRunner.class)
 public class LocalDocumentItemWriterTest extends EasyMockSupport {
 
-  private static final Document DOCUMENT = new Document(new HashMap<String, 
String>() {{ put("id", "1"); }});
-  private static final Document DOCUMENT2 = new Document(new HashMap<String, 
String>() {{ put("id", "2"); }});
-  private static final Document DOCUMENT3 = new Document(new HashMap<String, 
String>() {{ put("id", "3"); }});
+  private static final Document DOCUMENT = new Document(new HashMap<String, 
Object>() {{ put("id", "1"); }});
+  private static final Document DOCUMENT2 = new Document(new HashMap<String, 
Object>() {{ put("id", "2"); }});
+  private static final Document DOCUMENT3 = new Document(new HashMap<String, 
Object>() {{ put("id", "3"); }});
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
   private LocalDocumentItemWriter localDocumentItemWriter;
@@ -80,9 +81,9 @@ public void testWrite() throws Exception {
 
     List<Document> documentList = readBack(outFile);
     assertThat(documentList.size(), is(3));
-    assertThat(documentList.get(0).get("id"), is(DOCUMENT.get("id")));
-    assertThat(documentList.get(1).get("id"), is(DOCUMENT2.get("id")));
-    assertThat(documentList.get(2).get("id"), is(DOCUMENT3.get("id")));
+    assertThat(documentList.get(0).getString("id"), 
is(DOCUMENT.getString("id")));
+    assertThat(documentList.get(1).getString("id"), 
is(DOCUMENT2.getString("id")));
+    assertThat(documentList.get(2).getString("id"), 
is(DOCUMENT3.getString("id")));
   }
 
   private Comparator<WriteCompletedEvent> 
writeCompletedEventEqualityComparator() {
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
index 018c993..6334a15 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrParametrizedStringTest.java
@@ -1,12 +1,12 @@
 package org.apache.ambari.infra.job.archive;
 
-import org.junit.Test;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -28,9 +28,9 @@
  */
 public class SolrParametrizedStringTest {
 
-  private static final Map<String, String> PARAMETERS_1 = new HashMap<String, 
String>() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); 
}};
-  private static final Map<String, String> PARAMETERS_START = new 
HashMap<String, String>() {{ put("price", "1000"); }};
-  private static final Map<String, String> PARAMETERS_END = new 
HashMap<String, String>() {{ put("price", "2000"); }};
+  private static final Map<String, Object> PARAMETERS_1 = new HashMap<String, 
Object>() {{ put("id", "1"); put("name", "User"); put("product", "Computer"); 
}};
+  private static final Map<String, Object> PARAMETERS_START = new 
HashMap<String, Object>() {{ put("price", "1000"); }};
+  private static final Map<String, Object> PARAMETERS_END = new 
HashMap<String, Object>() {{ put("price", "2000"); }};
 
   @Test
   public void testToStringEmptyStringResultsEmptyString() {
diff --git 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
index 0f7049b..4f45189 100644
--- 
a/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
+++ 
b/ambari-infra-manager/src/test/java/org/apache/ambari/infra/job/archive/SolrQueryBuilderTest.java
@@ -33,7 +33,7 @@
 import org.junit.Test;
 
 public class SolrQueryBuilderTest {
-  private static final Document DOCUMENT = new Document(new HashMap<String, 
String>() {{
+  private static final Document DOCUMENT = new Document(new HashMap<String, 
Object>() {{
     put("logtime", "2017-10-02'T'10:00:11.634Z");
     put("id", "1");
   }});


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


> Infra Manager: hive support for archiving Infra Solr
> ----------------------------------------------------
>
>                 Key: AMBARI-24761
>                 URL: https://issues.apache.org/jira/browse/AMBARI-24761
>             Project: Ambari
>          Issue Type: Bug
>          Components: infra
>    Affects Versions: 2.8.0
>            Reporter: Krisztian Kasa
>            Assignee: Krisztian Kasa
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 2.8.0
>
>
> When exporting Solr documents from logsearch and ranger collections save it 
> to a format which can be parsed by Hive.



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to