http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java index 65c3b6b..ab3d3cf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/CounterStatsPublisher.java @@ -20,14 +20,14 @@ package org.apache.hadoop.hive.ql.stats; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.mapred.Reporter; public class CounterStatsPublisher implements StatsPublisher { - private static final Log LOG = LogFactory.getLog(CounterStatsPublisher.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(CounterStatsPublisher.class.getName()); private Reporter reporter;
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java index 053fa18..a53fcc0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.stats; import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; @@ -38,7 +38,7 @@ import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_STATS_KEY_PREFI */ public final class StatsFactory { - static final private Log LOG = LogFactory.getLog(StatsFactory.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsFactory.class.getName()); private Class <? extends Serializable> publisherImplementation; private Class <? 
extends Serializable> aggregatorImplementation; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java index cc8c9e8..e1f8ebc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java @@ -22,8 +22,8 @@ import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.math.LongMath; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -98,7 +98,7 @@ import java.util.Set; public class StatsUtils { - private static final Log LOG = LogFactory.getLog(StatsUtils.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(StatsUtils.class.getName()); /** http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java index f5303ae..5c5fafa 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsAggregator.java @@ -24,8 +24,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -38,7 +38,7 @@ import org.apache.hadoop.hive.ql.stats.StatsCollectionContext; import com.esotericsoftware.kryo.io.Input; public class FSStatsAggregator implements StatsAggregator { - private final Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private List<Map<String,Map<String,String>>> statsList; private Map<String, Map<String,String>> statsMap; private FileSystem fs; @@ -69,7 +69,7 @@ public class FSStatsAggregator implements StatsAggregator { } return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to read stats from filesystem ", e); return false; } } @@ -107,7 +107,7 @@ public class FSStatsAggregator implements StatsAggregator { fs.delete(statsDir,true); return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to delete stats dir", e); return true; } } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java index e5a907c..80f954b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/fs/FSStatsPublisher.java @@ -24,8 +24,8 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; @@ -38,7 +38,7 @@ import com.esotericsoftware.kryo.io.Output; public class FSStatsPublisher implements StatsPublisher { private 
Configuration conf; - private final Log LOG = LogFactory.getLog(this.getClass().getName()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); private Map<String, Map<String,String>> statsMap; // map from partID -> (statType->value) @Override @@ -52,7 +52,7 @@ public class FSStatsPublisher implements StatsPublisher { } return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to create dir", e); return false; } } @@ -68,7 +68,7 @@ public class FSStatsPublisher implements StatsPublisher { try { return statsDir.getFileSystem(conf).exists(statsDir); } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to check if dir exists", e); return false; } } @@ -104,7 +104,7 @@ public class FSStatsPublisher implements StatsPublisher { output.close(); return true; } catch (IOException e) { - LOG.error(e); + LOG.error("Failed to persist stats on filesystem",e); return false; } } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java index 23a77e6..dee7601 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/AcidHouseKeeperService.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HouseKeeperService; import org.apache.hadoop.hive.metastore.txn.TxnHandler; @@ -36,9 +36,9 @@ import java.util.concurrent.atomic.AtomicInteger; * Runs inside Hive Metastore Service. 
*/ public class AcidHouseKeeperService implements HouseKeeperService { - private static final Log LOG = LogFactory.getLog(AcidHouseKeeperService.class); + private static final Logger LOG = LoggerFactory.getLogger(AcidHouseKeeperService.class); private ScheduledExecutorService pool = null; - private AtomicInteger isAliveCounter = new AtomicInteger(Integer.MIN_VALUE); + private final AtomicInteger isAliveCounter = new AtomicInteger(Integer.MIN_VALUE); @Override public void start(HiveConf hiveConf) throws Exception { HiveTxnManager mgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(hiveConf); @@ -90,7 +90,7 @@ public class AcidHouseKeeperService implements HouseKeeperService { LOG.info("timeout reaper ran for " + (System.currentTimeMillis() - startTime)/1000 + "seconds. isAliveCounter=" + count); } catch(Throwable t) { - LOG.fatal("Serious error in " + Thread.currentThread().getName() + ": " + t.getMessage(), t); + LOG.error("Serious error in {}: {}", Thread.currentThread().getName(), t.getMessage(), t); } } } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java index 622bf54..b847202 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Cleaner.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -52,7 +52,7 @@ import java.util.concurrent.TimeUnit; */ public class Cleaner extends CompactorThread { static final
private String CLASS_NAME = Cleaner.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private long cleanerCheckInterval = 0; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java index bab01a9..7d0f46a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FileStatus; @@ -75,7 +75,7 @@ import java.util.regex.Matcher; public class CompactorMR { static final private String CLASS_NAME = CompactorMR.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private String INPUT_FORMAT_CLASS_NAME = "hive.compactor.input.format.class.name"; static final private String OUTPUT_FORMAT_CLASS_NAME = "hive.compactor.output.format.class.name"; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java index c956f58..3f6b099 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -47,7 +47,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ abstract class CompactorThread extends Thread implements MetaStoreThread { static final private String CLASS_NAME = CompactorThread.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); protected HiveConf conf; protected CompactionTxnHandler txnHandler; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java index f265311..2d051fd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Initiator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -28,7 +28,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.CompactionRequest; import org.apache.hadoop.hive.metastore.api.CompactionType; import 
org.apache.hadoop.hive.metastore.api.MetaException; -import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; @@ -57,7 +56,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class Initiator extends CompactorThread { static final private String CLASS_NAME = Initiator.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private long checkInterval; @@ -269,7 +268,7 @@ public class Initiator extends CompactorThread { msg.append(deltaPctThreshold); msg.append(" will major compact: "); msg.append(bigEnough); - LOG.debug(msg); + LOG.debug(msg.toString()); } if (bigEnough) return CompactionType.MAJOR; } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java index cc7441a..002464f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.txn.compactor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicBoolean; */ public class Worker extends CompactorThread { static final private String CLASS_NAME = Worker.class.getName(); - static final private Log LOG = 
LogFactory.getLog(CLASS_NAME); + static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME); static final private long SLEEP_TIME = 5000; static final private int baseThreadNum = 10002; @@ -205,7 +205,7 @@ public class Worker extends CompactorThread { } static final class StatsUpdater { - static final private Log LOG = LogFactory.getLog(StatsUpdater.class); + static final private Logger LOG = LoggerFactory.getLogger(StatsUpdater.class); public static StatsUpdater init(CompactionInfo ci, List<String> columnListForStats, HiveConf conf, String userName) { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java index a042116..dfd88bb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFE.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; ) public class UDFE extends UDF { @SuppressWarnings("unused") - private static Log LOG = LogFactory.getLog(UDFE.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFE.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFE() { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java index 07288c1..7b0656f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPI.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; ) public class UDFPI extends UDF { @SuppressWarnings("unused") - private static Log LOG = LogFactory.getLog(UDFPI.class.getName() ); + private static final Logger LOG = LoggerFactory.getLogger(UDFPI.class.getName() ); DoubleWritable result = new DoubleWritable(); public UDFPI() { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java index 9f78449..cd2449f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFAverage.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -57,7 +57,7 @@ import org.apache.hadoop.util.StringUtils; @Description(name = "avg", value = "_FUNC_(x) - Returns the 
mean of a set of numbers") public class GenericUDAFAverage extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFAverage.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFAverage.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java index 8482e18..0e96f89 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -43,6 +43,8 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenericUDAFComputeStats @@ -52,7 +54,7 @@ import org.apache.hadoop.util.StringUtils; value = "_FUNC_(x) - Returns the statistical summary of a set of primitive type values.") public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFComputeStats.class.getName()); + static final Logger LOG = 
LoggerFactory.getLogger(GenericUDAFComputeStats.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) @@ -215,16 +217,10 @@ public class GenericUDAFComputeStats extends AbstractGenericUDAFResolver { BooleanStatsAgg myagg = (BooleanStatsAgg) agg; LOG.debug(functionName); - - LOG.debug("Count of True Values:"); - LOG.debug(myagg.countTrues); - - LOG.debug("Count of False Values:"); - LOG.debug(myagg.countFalses); - - LOG.debug("Count of Null Values:"); - LOG.debug(myagg.countNulls); - } + LOG.debug("Count of True Values: {}", myagg.countTrues); + LOG.debug("Count of False Values: {}", myagg.countFalses); + LOG.debug("Count of Null Values: {}", myagg.countNulls); + } boolean warned = false; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java index 49e3dcf..39b632b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFContextNGrams.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -57,7 +57,7 @@ import org.apache.hadoop.io.Text; "would attempt to determine the 10 most common two-word phrases that follow \"i love\" " + "in a database of free-form natural language movie reviews.") public class GenericUDAFContextNGrams 
implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFContextNGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFContextNGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java index 22b8545..6172812 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java @@ -22,8 +22,8 @@ import java.util.ArrayList; import javaewah.EWAHCompressedBitmap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput; @@ -50,7 +50,7 @@ import org.apache.hadoop.util.StringUtils; @Description(name = "ewah_bitmap", value = "_FUNC_(expr) - Returns an EWAH-compressed bitmap representation of a column.") public class GenericUDAFEWAHBitmap extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFEWAHBitmap.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFEWAHBitmap.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java 
---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java index 1bed46b..160ce91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFFirstValue.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayDeque; import java.util.Deque; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -52,7 +52,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; ) public class GenericUDAFFirstValue extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFFirstValue.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFFirstValue.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java index 434956f..ffb53c2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFHistogramNumeric.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import 
java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -59,7 +59,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + "statistical computing packages.") public class GenericUDAFHistogramNumeric extends AbstractGenericUDAFResolver { // class static variables - static final Log LOG = LogFactory.getLog(GenericUDAFHistogramNumeric.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFHistogramNumeric.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java index fa5047d..f6b5aef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLag.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +44,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFLeadLag.GenericUDAFLeadL ) public class GenericUDAFLag extends GenericUDAFLeadLag { - static final Log LOG = 
LogFactory.getLog(GenericUDAFLag.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLag.class.getName()); @Override http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java index aa98cc9..f917621 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLastValue.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -41,7 +41,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; supportsWindow = true, pivotResult = false, impliesOrder = true) public class GenericUDAFLastValue extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFLastValue.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLastValue.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java index 6a27325..8f57a1b 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLead.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -40,7 +40,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuf ) public class GenericUDAFLead extends GenericUDAFLeadLag { - static final Log LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLead.class.getName()); @Override http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java index 79abc0c..376b73e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFLeadLag.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -38,7 +38,7 @@ import org.apache.hadoop.io.IntWritable; */ public abstract class GenericUDAFLeadLag extends AbstractGenericUDAFResolver { - static final Log 
LOG = LogFactory.getLog(GenericUDAFLead.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFLeadLag.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java index 55a6a62..98abd5c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMax.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayDeque; import java.util.Deque; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @Description(name = "max", value = "_FUNC_(expr) - Returns the maximum value of expr") public class GenericUDAFMax extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFMax.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFMax.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java index 816350f..bde36e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFMin.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -36,7 +36,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; @Description(name = "min", value = "_FUNC_(expr) - Returns the minimum value of expr") public class GenericUDAFMin extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFMin.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFMin.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java index 83693a8..b5d0c77 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFNTile.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import 
org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -51,7 +51,7 @@ import org.apache.hadoop.io.IntWritable; ) public class GenericUDAFNTile extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFNTile.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFNTile.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java index b73a6eb..66e42ed 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentRank.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -45,7 +45,7 @@ import org.apache.hadoop.io.IntWritable; ) public class GenericUDAFPercentRank extends GenericUDAFRank { - static final Log LOG = LogFactory.getLog(GenericUDAFPercentRank.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFPercentRank.class.getName()); @Override protected GenericUDAFAbstractRankEvaluator createEvaluator() { 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java index 89d95f8..795013a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileApprox.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -64,7 +64,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn "> SELECT percentile_approx(val, array(0.5, 0.95, 0.98), 100000) FROM somedata;\n" + "[0.05,1.64,2.26]\n") public class GenericUDAFPercentileApprox extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFPercentileApprox.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFPercentileApprox.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java index 528cdbc..8ade68b 
100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRank.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -48,7 +48,7 @@ import org.apache.hadoop.io.IntWritable; impliesOrder = true) public class GenericUDAFRank extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFRank.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFRank.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java index d733e2e..8e672e6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFRowNumber.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.exec.WindowFunctionDescription; @@ -45,7 +45,7 @@ 
import org.apache.hadoop.io.IntWritable; ) public class GenericUDAFRowNumber extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFRowNumber.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFRowNumber.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java index c6ffbec..0968008 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFSum.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; @@ -48,7 +48,7 @@ import org.apache.hadoop.util.StringUtils; @Description(name = "sum", value = "_FUNC_(x) - Returns the sum of a set of numbers") public class GenericUDAFSum extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFSum.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFSum.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java ---------------------------------------------------------------------- diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java index 2950605..dcd90eb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -50,7 +50,7 @@ import org.apache.hadoop.util.StringUtils; value = "_FUNC_(x) - Returns the variance of a set of numbers") public class GenericUDAFVariance extends AbstractGenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFVariance.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFVariance.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java index 1c9456e..7febbf4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFnGrams.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -55,7 +55,7 @@ import org.apache.hadoop.io.Text; + "The output is an array of structs with the top-k n-grams. It might be convenient " + "to explode() the output of this UDAF.") public class GenericUDAFnGrams implements GenericUDAFResolver { - static final Log LOG = LogFactory.getLog(GenericUDAFnGrams.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDAFnGrams.class.getName()); @Override public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java index 1343d3b..30ef5ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.sql.Timestamp; import java.util.TimeZone; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -38,7 +38,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn + "Assumes given timestamp is UTC and converts to given timezone (as of Hive 0.8.0)") public class GenericUDFFromUtcTimestamp 
extends GenericUDF { - static final Log LOG = LogFactory.getLog(GenericUDFFromUtcTimestamp.class); + static final Logger LOG = LoggerFactory.getLogger(GenericUDFFromUtcTimestamp.class); private transient PrimitiveObjectInspector[] argumentOIs; private transient TimestampConverter timestampConverter; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java index 0a9dd7b..54d9085 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRegExp.java @@ -23,8 +23,8 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; @@ -48,7 +48,7 @@ import org.apache.hadoop.io.BooleanWritable; + " > SELECT 'fb' _FUNC_ '.*' FROM src LIMIT 1;\n" + " true") @VectorizedExpressions({FilterStringColRegExpStringScalar.class}) public class GenericUDFRegExp extends GenericUDF { - static final Log LOG = LogFactory.getLog(GenericUDFRegExp.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(GenericUDFRegExp.class.getName()); private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2]; private transient Converter[] converters = new Converter[2]; private final BooleanWritable output = new BooleanWritable(); @@ -130,4 +130,4 @@ public class GenericUDFRegExp extends 
GenericUDF { protected String getFuncName() { return "regexp"; } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java index 24b49a0..9f3ab91 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java index 83e36a5..aa715f5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToChar.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ 
-41,7 +41,7 @@ extended = "Values will be truncated if the input value is too long to fit" + " > SELECT CAST(1234 AS char(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToChar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToChar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToChar.class.getName()); private transient PrimitiveObjectInspector argumentOI; private transient HiveCharConverter converter; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java index b857f6a..5db154f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToVarchar.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.io.Serializable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,7 +41,7 @@ extended = "Values will be truncated if the input value is too long to fit" + " > SELECT CAST(1234 AS varchar(10)) FROM src LIMIT 1;\n" + " '1234'") public class GenericUDFToVarchar extends GenericUDF implements SettableUDF, Serializable { - private static final Log LOG = LogFactory.getLog(GenericUDFToVarchar.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFToVarchar.class.getName()); private transient PrimitiveObjectInspector 
argumentOI; private transient HiveVarcharConverter converter; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java index c1b2a01..118acdc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import java.io.PrintStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -38,7 +38,7 @@ import org.apache.hadoop.io.LongWritable; extended = "Converts the specified time to number of seconds " + "since 1970-01-01. The _FUNC_(void) overload is deprecated, use current_timestamp.") public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp { - private static final Log LOG = LogFactory.getLog(GenericUDFUnixTimeStamp.class); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDFUnixTimeStamp.class); private LongWritable currentTimestamp; // retValue is transient so store this separately. 
@Override protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentException { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java index ea5aeec..b710015 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFJSONTuple.java @@ -23,8 +23,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -51,7 +51,7 @@ import org.codehaus.jackson.type.JavaType; public class GenericUDTFJSONTuple extends GenericUDTF { - private static Log LOG = LogFactory.getLog(GenericUDTFJSONTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFJSONTuple.class.getName()); private static final JsonFactory JSON_FACTORY = new JsonFactory(); static { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java index f3ef0f5..824c41d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFParseUrlTuple.java @@ -24,8 +24,8 @@ import java.util.ArrayList; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -58,7 +58,7 @@ public class GenericUDTFParseUrlTuple extends GenericUDTF { HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUERY_WITH_KEY, NULLNAME }; - private static Log LOG = LogFactory.getLog(GenericUDTFParseUrlTuple.class.getName()); + private static final Logger LOG = LoggerFactory.getLogger(GenericUDTFParseUrlTuple.class.getName()); int numCols; // number of output columns String[] paths; // array of pathnames, each of which corresponds to a column http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java index 1424ba8..d33369b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NGramEstimator.java @@ -27,8 +27,8 @@ import java.util.Comparator; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A generic, re-usable n-gram estimation class that supports partial aggregations. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java index 7cc5734..99f4d71 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/NumDistinctValueEstimator.java @@ -20,15 +20,15 @@ import java.util.Random; import javolution.util.FastBitSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.util.JavaDataModel; import org.apache.hadoop.io.Text; public class NumDistinctValueEstimator { - static final Log LOG = LogFactory.getLog(NumDistinctValueEstimator.class.getName()); + static final Logger LOG = LoggerFactory.getLogger(NumDistinctValueEstimator.class.getName()); /* We want a,b,x to come from a finite field of size 0 to k, where k is a prime number. * 2^p - 1 is prime for p = 31. Hence bitvectorSize has to be 31. Pick k to be 2^p -1. 
@@ -150,10 +150,8 @@ public class NumDistinctValueEstimator { String t = new String(); LOG.debug("NumDistinctValueEstimator"); - LOG.debug("Number of Vectors:"); - LOG.debug(numBitVectors); - LOG.debug("Vector Size: "); - LOG.debug(BIT_VECTOR_SIZE); + LOG.debug("Number of Vectors: {}", numBitVectors); + LOG.debug("Vector Size: {}", BIT_VECTOR_SIZE); for (int i=0; i < numBitVectors; i++) { t = t + bitVector[i].toString(); @@ -353,7 +351,7 @@ public class NumDistinctValueEstimator { } avgLeastSigZero = - (double)(sumLeastSigZero/(numBitVectors * 1.0)) - (Math.log(PHI)/Math.log(2.0)); + sumLeastSigZero/(numBitVectors * 1.0) - (Math.log(PHI)/Math.log(2.0)); numDistinctValues = Math.pow(2.0, avgLeastSigZero); return ((long)(numDistinctValues)); } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java index 63d35ae..9d21103 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java @@ -27,8 +27,8 @@ import java.util.List; import java.util.Map; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -65,7 +65,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn @SuppressWarnings("deprecation") public class WindowingTableFunction extends TableFunctionEvaluator { - public static final Log LOG 
=LogFactory.getLog(WindowingTableFunction.class.getName()); + public static final Logger LOG =LoggerFactory.getLogger(WindowingTableFunction.class.getName()); static class WindowingFunctionInfoHelper { private boolean supportsWindow; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java index f6cb8ac..0e99874 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java @@ -18,13 +18,13 @@ package org.apache.hadoop.hive.ql.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.zookeeper.Watcher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class ZooKeeperHiveHelper { - public static final Log LOG = LogFactory.getLog(ZooKeeperHiveHelper.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(ZooKeeperHiveHelper.class.getName()); public static final String ZOOKEEPER_PATH_SEPARATOR = "/"; /** http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index e6d3b29..7ab94a2 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -26,8 +26,8 @@ import java.util.List; import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -73,7 +73,7 @@ public class TestExecDriver extends TestCase { static HiveConf conf; private static final String tmpdir; - private static final Log LOG = LogFactory.getLog(TestExecDriver.class); + private static final Logger LOG = LoggerFactory.getLogger(TestExecDriver.class); private static final Path tmppath; private static Hive db; private static FileSystem fs; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java index 7521466..68c598a 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -86,7 +86,7 @@ import java.util.Properties; */ public class TestFileSinkOperator { private static String PARTCOL_NAME = "partval"; - static final private Log LOG = LogFactory.getLog(TestFileSinkOperator.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestFileSinkOperator.class.getName()); private static File tmpdir; private static TableDesc nonAcidTableDescriptor; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java 
---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index ccc21e9..028cdd1 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -34,8 +34,8 @@ import junit.framework.Assert; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -48,7 +48,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; public class TestUtilities extends TestCase { - public static final Log LOG = LogFactory.getLog(TestUtilities.class); + public static final Logger LOG = LoggerFactory.getLogger(TestUtilities.class); public void testGetFileExtension() { JobConf jc = new JobConf(); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java index 595ffa6..16b5b17 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/mapjoin/TestMapJoinMemoryExhaustionHandler.java @@ -20,14 +20,14 @@ package org.apache.hadoop.hive.ql.exec.mapjoin; import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; 
-import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.junit.Before; import org.junit.Test; public class TestMapJoinMemoryExhaustionHandler { - private static final Log LOG = LogFactory.getLog(TestMapJoinMemoryExhaustionHandler.class); + private static final Logger LOG = LoggerFactory.getLogger(TestMapJoinMemoryExhaustionHandler.class); private LogHelper logHelper; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java index 4d93ea6..489383b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/spark/session/TestSparkSessionManagerImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hive.ql.exec.spark.session; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.util.StringUtils; import org.junit.Test; @@ -32,7 +32,7 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class TestSparkSessionManagerImpl { - private static final Log LOG = LogFactory.getLog(TestSparkSessionManagerImpl.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSparkSessionManagerImpl.class); private SparkSessionManagerImpl sessionManagerHS2 = null; private boolean anyFailedSessionThread; // updated only when a thread has failed. 
http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java index 3354219..11c0325 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezSessionPool.java @@ -29,13 +29,13 @@ import java.util.Random; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; public class TestTezSessionPool { - private static final Log LOG = LogFactory.getLog(TestTezSessionPoolManager.class); + private static final Logger LOG = LoggerFactory.getLogger(TestTezSessionPoolManager.class); HiveConf conf; Random random; private TezSessionPoolManager poolManager; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java index 2fed9a7..515ea7b 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java @@ -1012,7 +1012,7 @@ public class TestVectorizationContext { Assert.assertEquals(BRoundWithNumDigitsDoubleToDouble.class, ve.getClass()); Assert.assertEquals(4, ((BRoundWithNumDigitsDoubleToDouble) ve).getDecimalPlaces().get()); - // Log with int base + // Logger with int 
base gudfBridge = new GenericUDFBridge("log", false, UDFLog.class.getName()); mathFuncExpr.setGenericUDF(gudfBridge); children2.clear(); @@ -1023,7 +1023,7 @@ public class TestVectorizationContext { Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass()); Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble) ve).getBase()); - // Log with default base + // Logger with default base children2.clear(); children2.add(colDesc2); mathFuncExpr.setChildren(children2); @@ -1583,4 +1583,4 @@ public class TestVectorizationContext { b = 1; assertEquals(a != b ? 1 : 0, ((a - b) ^ (b - a)) >>> 63); } -} \ No newline at end of file +} http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java index cff5ada..a68049f 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java @@ -36,8 +36,8 @@ import java.util.List; import java.util.Properties; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -81,7 +81,7 @@ import org.junit.Test; */ public class TestRCFile { - private static final Log LOG = LogFactory.getLog(TestRCFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestRCFile.class); private Configuration conf; private ColumnarSerDe serDe; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java ---------------------------------------------------------------------- diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index 6f0b9df..08b8c32 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -26,8 +26,8 @@ import java.util.List; import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; @@ -57,8 +57,8 @@ import org.apache.hadoop.util.ReflectionUtils; */ @SuppressWarnings("deprecation") public class TestSymlinkTextInputFormat extends TestCase { - private static Log log = - LogFactory.getLog(TestSymlinkTextInputFormat.class); + private static final Logger log = + LoggerFactory.getLogger(TestSymlinkTextInputFormat.class); private Configuration conf; private JobConf job; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java index 60af40a..797bbfb 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcRawRecordMerger.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hive.ql.io.orc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -61,7 +61,7 @@ import static 
org.junit.Assert.assertNull; public class TestOrcRawRecordMerger { - private static final Log LOG = LogFactory.getLog(TestOrcRawRecordMerger.class); + private static final Logger LOG = LoggerFactory.getLogger(TestOrcRawRecordMerger.class); //todo: why is statementId -1? @Test public void testOrdering() throws Exception { http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java index 68c6542..c1e5c81 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.txn.AcidHouseKeeperService; import org.apache.log4j.Level; -import org.apache.log4j.LogManager; +import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.is; import org.junit.After; import org.junit.Assert; @@ -49,12 +49,11 @@ import java.util.concurrent.TimeUnit; */ public class TestDbTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private AcidHouseKeeperService houseKeeperService = null; - private Context ctx; + private final Context ctx; private int nextInput; - private int nextOutput; HashSet<ReadEntity> readEntities; HashSet<WriteEntity> writeEntities; @@ -62,7 +61,6 @@ public class TestDbTxnManager { TxnDbUtil.setConfValues(conf); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); tearDown(); } @@ -363,7 +361,6 @@ public class TestDbTxnManager { txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); 
Assert.assertTrue(txnMgr instanceof DbTxnManager); nextInput = 1; - nextOutput = 1; readEntities = new HashSet<ReadEntity>(); writeEntities = new HashSet<WriteEntity>(); conf.setTimeVar(HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, 0, TimeUnit.SECONDS); @@ -379,8 +376,8 @@ public class TestDbTxnManager { } private static class MockQueryPlan extends QueryPlan { - private HashSet<ReadEntity> inputs; - private HashSet<WriteEntity> outputs; + private final HashSet<ReadEntity> inputs; + private final HashSet<WriteEntity> outputs; MockQueryPlan(TestDbTxnManager test) { HashSet<ReadEntity> r = test.readEntities; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java index 19f82ad..0fc87ae 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDummyTxnManager.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData; import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLock; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; @@ -50,7 +48,7 @@ import java.util.List; @RunWith(MockitoJUnitRunner.class) public class TestDummyTxnManager { - private HiveConf conf = new HiveConf(); + private final HiveConf conf = new HiveConf(); private HiveTxnManager txnMgr; private Context ctx; private int nextInput = 1; @@ -67,7 +65,6 @@ public class TestDummyTxnManager { conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, 
DummyTxnManager.class.getName()); SessionState.start(conf); ctx = new Context(conf); - LogManager.getRootLogger().setLevel(Level.DEBUG); txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf); Assert.assertTrue(txnMgr instanceof DummyTxnManager); @@ -116,8 +113,8 @@ public class TestDummyTxnManager { Assert.assertEquals(expectedLocks.get(1).getHiveLockMode(), resultLocks.get(1).getHiveLockMode()); Assert.assertEquals(expectedLocks.get(0).getHiveLockObject().getName(), resultLocks.get(0).getHiveLockObject().getName()); - verify(mockLockManager).lock((List<HiveLockObj>)lockObjsCaptor.capture(), eq(false)); - List<HiveLockObj> lockObjs = (List<HiveLockObj>)lockObjsCaptor.getValue(); + verify(mockLockManager).lock(lockObjsCaptor.capture(), eq(false)); + List<HiveLockObj> lockObjs = lockObjsCaptor.getValue(); Assert.assertEquals(2, lockObjs.size()); Assert.assertEquals("default", lockObjs.get(0).getName()); Assert.assertEquals(HiveLockMode.SHARED, lockObjs.get(0).mode); @@ -157,6 +154,7 @@ public class TestDummyTxnManager { Assert.assertEquals("Locks should be deduped", 2, lockObjs.size()); Comparator<HiveLockObj> cmp = new Comparator<HiveLockObj>() { + @Override public int compare(HiveLockObj lock1, HiveLockObj lock2) { return lock1.getName().compareTo(lock2.getName()); } http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java index bdd837e..cc2ad4e 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java @@ -54,7 +54,7 @@ public class TestLog4j2Appenders { appender.addToLogger(logger.getName(), Level.INFO); appender.start(); - // Log to the string appender + // Logger to the 
string appender logger.info("Hello!"); logger.info(" World"); http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java index f78f226..f0f014c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java @@ -23,8 +23,8 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -45,7 +45,7 @@ import org.junit.Test; public class TestUpdateDeleteSemanticAnalyzer { - static final private Log LOG = LogFactory.getLog(TestUpdateDeleteSemanticAnalyzer.class.getName()); + static final private Logger LOG = LoggerFactory.getLogger(TestUpdateDeleteSemanticAnalyzer.class.getName()); private HiveConf conf; private Hive db; http://git-wip-us.apache.org/repos/asf/hive/blob/55337444/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java ---------------------------------------------------------------------- diff --git a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java index 70985b3..145a531 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java @@ -30,8 +30,8 @@ import java.util.Collection; import org.apache.commons.io.FileUtils; import 
org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -61,7 +61,7 @@ public class TestSessionState { private final static String V2 = "V2"; private static String hiveReloadPath; private File reloadFolder; - public static final Log LOG = LogFactory.getLog(TestSessionState.class); + public static final Logger LOG = LoggerFactory.getLogger(TestSessionState.class); public TestSessionState(Boolean mode) { this.prewarm = mode.booleanValue();
