Modified: hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_precision.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_precision.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_precision.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_precision.q.out Tue Nov 18 00:48:40 2014 @@ -99,13 +99,13 @@ NULL NULL NULL NULL -0.0000000000 -0.0000000000 -0.0000000000 -0.0000000000 0 -0.1234567890 -0.1234567890 +0 +0 +0 +0 +0.123456789 +0.123456789 1.2345678901 1.2345678901 1.2345678901 @@ -129,7 +129,7 @@ NULL 123456789.0123456 123456789.0123456789 1234567890.123456 -1234567890.1234567890 +1234567890.123456789 PREHOOK: query: SELECT dec, dec + 1, dec - 1 FROM DECIMAL_PRECISION ORDER BY dec PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -182,13 +182,13 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -0.0000000000 1.0000000000 -1.0000000000 -0.0000000000 1.0000000000 -1.0000000000 -0.0000000000 1.0000000000 -1.0000000000 -0.0000000000 1.0000000000 -1.0000000000 0 1 -1 -0.1234567890 1.1234567890 -0.8765432110 -0.1234567890 1.1234567890 -0.8765432110 +0 1 -1 +0 1 -1 +0 1 -1 +0 1 -1 +0.123456789 1.123456789 -0.876543211 +0.123456789 1.123456789 -0.876543211 1.2345678901 2.2345678901 0.2345678901 1.2345678901 2.2345678901 0.2345678901 1.2345678901 2.2345678901 0.2345678901 @@ -212,7 +212,7 @@ NULL NULL NULL 123456789.0123456 123456790.0123456 123456788.0123456 123456789.0123456789 123456790.0123456789 123456788.0123456789 1234567890.123456 1234567891.123456 1234567889.123456 -1234567890.1234567890 1234567891.1234567890 1234567889.1234567890 +1234567890.123456789 1234567891.123456789 1234567889.123456789 PREHOOK: query: SELECT dec, dec * 2, dec / 3 FROM DECIMAL_PRECISION ORDER BY dec PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -265,13 +265,13 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL -0.0000000000 0.0000000000 0 -0.0000000000 0.0000000000 0 -0.0000000000 0.0000000000 0 -0.0000000000 0.0000000000 0 0 0 0 -0.1234567890 0.2469135780 0.041152263 -0.1234567890 0.2469135780 0.041152263 +0 0 0 +0 0 0 +0 0 0 +0 0 0 +0.123456789 0.246913578 0.041152263 +0.123456789 0.246913578 0.041152263 1.2345678901 2.4691357802 0.411522630033 1.2345678901 2.4691357802 0.411522630033 1.2345678901 2.4691357802 0.411522630033 @@ -281,9 +281,9 @@ NULL NULL NULL 123.4567890123 246.9135780246 41.1522630041 123.4567890123 246.9135780246 41.1522630041 123.4567890123 246.9135780246 41.1522630041 -1234.5678901235 2469.1357802470 411.522630041167 -1234.5678901235 2469.1357802470 411.522630041167 -1234.5678901235 2469.1357802470 411.522630041167 +1234.5678901235 2469.135780247 411.522630041167 +1234.5678901235 2469.135780247 411.522630041167 +1234.5678901235 2469.135780247 411.522630041167 12345.6789012346 24691.3578024692 4115.226300411533 12345.6789012346 24691.3578024692 4115.226300411533 123456.7890123456 246913.5780246912 41152.2630041152 @@ -295,7 +295,7 @@ NULL NULL NULL 123456789.0123456 246913578.0246912 41152263.0041152 123456789.0123456789 246913578.0246913578 41152263.0041152263 1234567890.123456 2469135780.246912 411522630.041152 -1234567890.1234567890 2469135780.2469135780 411522630.041152263 +1234567890.123456789 2469135780.246913578 411522630.041152263 PREHOOK: query: SELECT 
dec, dec / 9 FROM DECIMAL_PRECISION ORDER BY dec PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -348,13 +348,13 @@ NULL NULL NULL NULL NULL NULL NULL NULL -0.0000000000 0 -0.0000000000 0 -0.0000000000 0 -0.0000000000 0 0 0 -0.1234567890 0.013717421 -0.1234567890 0.013717421 +0 0 +0 0 +0 0 +0 0 +0.123456789 0.013717421 +0.123456789 0.013717421 1.2345678901 0.137174210011 1.2345678901 0.137174210011 1.2345678901 0.137174210011 @@ -378,7 +378,7 @@ NULL NULL 123456789.0123456 13717421.001371733333 123456789.0123456789 13717421.0013717421 1234567890.123456 137174210.013717333333 -1234567890.1234567890 137174210.013717421 +1234567890.123456789 137174210.013717421 PREHOOK: query: SELECT dec, dec / 27 FROM DECIMAL_PRECISION ORDER BY dec PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -431,13 +431,13 @@ NULL NULL NULL NULL NULL NULL NULL NULL -0.0000000000 0 -0.0000000000 0 -0.0000000000 0 -0.0000000000 0 0 0 -0.1234567890 0.0045724736667 -0.1234567890 0.0045724736667 +0 0 +0 0 +0 0 +0 0 +0.123456789 0.0045724736667 +0.123456789 0.0045724736667 1.2345678901 0.0457247366704 1.2345678901 0.0457247366704 1.2345678901 0.0457247366704 @@ -461,7 +461,7 @@ NULL NULL 123456789.0123456 4572473.6671239111111 123456789.0123456789 4572473.6671239140333 1234567890.123456 45724736.6712391111111 -1234567890.1234567890 45724736.6712391403333 +1234567890.123456789 45724736.6712391403333 PREHOOK: query: SELECT dec, dec * dec FROM DECIMAL_PRECISION ORDER BY dec PREHOOK: type: QUERY PREHOOK: Input: default@decimal_precision @@ -514,13 +514,13 @@ NULL NULL NULL NULL NULL NULL NULL NULL -0.0000000000 0.00000000000000000000 -0.0000000000 0.00000000000000000000 -0.0000000000 0.00000000000000000000 -0.0000000000 0.00000000000000000000 0 0 -0.1234567890 0.01524157875019052100 -0.1234567890 0.01524157875019052100 +0 0 +0 0 +0 0 +0 0 +0.123456789 0.015241578750190521 +0.123456789 0.015241578750190521 1.2345678901 1.52415787526596567801 1.2345678901 1.52415787526596567801 1.2345678901 1.52415787526596567801 @@ -544,7 +544,7 @@ NULL NULL 123456789.0123456 15241578753238817.26870921383936 123456789.0123456789 15241578753238836.75019051998750190521 1234567890.123456 NULL -1234567890.1234567890 NULL +1234567890.123456789 NULL PREHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION
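[Context for the golden-file changes above: the updated outputs print decimals with trailing fractional zeros removed, so 0.1234567890 becomes 0.123456789 and 0.0000000000 becomes 0. A minimal sketch of that display rule using plain java.math.BigDecimal; the display() helper is hypothetical and is not Hive's HiveDecimal code.]

import java.math.BigDecimal;

public class TrailingZeroDemo {
  // Hypothetical helper: render a decimal the way the updated .q.out files show it,
  // with trailing fractional zeros removed and an exact zero printed as plain "0".
  static String display(String literal) {
    BigDecimal d = new BigDecimal(literal);
    if (d.compareTo(BigDecimal.ZERO) == 0) {
      // on some JDKs stripTrailingZeros() leaves an all-zero value unchanged,
      // so handle zero explicitly
      return "0";
    }
    return d.stripTrailingZeros().toPlainString();
  }

  public static void main(String[] args) {
    System.out.println(display("0.1234567890"));           // prints 0.123456789
    System.out.println(display("1234567890.1234567890"));  // prints 1234567890.123456789
    System.out.println(display("0.0000000000"));           // prints 0
  }
}
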
Modified: hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_round_2.q.out Tue Nov 18 00:48:40 2014 @@ -114,7 +114,7 @@ FROM decimal_tbl_1_orc ORDER BY d POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_tbl_1_orc #### A masked pattern was here #### -55555 55555 55555.0 55555.00 55555.000 55560 55600 56000 60000 100000 0 0 0 +55555 55555 55555 55555 55555 55560 55600 56000 60000 100000 0 0 0 PREHOOK: query: create table decimal_tbl_2_orc (pos decimal(38,18), neg decimal(38,18)) STORED AS ORC PREHOOK: type: CREATETABLE @@ -226,7 +226,7 @@ FROM decimal_tbl_2_orc ORDER BY p POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_tbl_2_orc #### A masked pattern was here #### -125 125 125.3 125.32 125.315 125.3150 130 100 0 0 -125 -125 -125.3 -125.32 -125.315 -125.3150 -130 -100 0 0 +125 125 125.3 125.32 125.315 125.315 130 100 0 0 -125 -125 -125.3 -125.32 -125.315 -125.315 -130 -100 0 0 PREHOOK: query: create table decimal_tbl_3_orc (dec decimal(38,18)) STORED AS ORC PREHOOK: type: CREATETABLE @@ -381,7 +381,7 @@ FROM decimal_tbl_3_orc ORDER BY d POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_tbl_3_orc #### A masked pattern was here #### -0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3.1 3.14 3.142 3.1416 3.14159 3.141593 3.1415927 3.14159265 3.141592654 3.1415926536 3.14159265359 3.141592653590 3.1415926535898 3.1415926535898 3.14159265358979 3.141592653589793 3.1415926535897930 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 3 3.1 3.14 3.142 3.1416 3.14159 3.141593 3.1415927 3.14159265 3.141592654 3.1415926536 3.14159265359 3.14159265359 3.1415926535898 3.1415926535898 3.14159265358979 3.141592653589793 3.141592653589793 PREHOOK: query: create table decimal_tbl_4_orc (pos decimal(38,18), neg decimal(38,18)) STORED AS ORC PREHOOK: type: CREATETABLE Modified: hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_trailing.q.out Tue Nov 18 00:48:40 2014 @@ -76,13 +76,13 @@ POSTHOOK: Input: default@decimal_trailin 0 0 0 1 0 0 2 NULL NULL -3 1.0000 1.00000000 -4 10.0000 10.00000000 -5 100.0000 100.00000000 -6 1000.0000 1000.00000000 -7 10000.0000 10000.00000000 -8 100000.0000 100000.00000000 -9 NULL 1000000.00000000 +3 1 1 +4 10 10 +5 100 100 +6 1000 1000 +7 10000 10000 +8 100000 100000 +9 NULL 1000000 10 NULL NULL 11 NULL NULL 12 NULL NULL @@ -91,18 +91,18 @@ POSTHOOK: Input: default@decimal_trailin 15 NULL NULL 16 NULL NULL 17 NULL NULL -18 1.0000 1.00000000 -19 10.000 10.0000000 -20 100.00 100.000000 -21 1000.0 1000.00000 -22 100000 10000.0000 -23 0.0000 0.00000000 -24 0.000 0.0000000 -25 0.00 0.000000 -26 0.0 0.00000 -27 0 0.00000 -28 12313.2000 134134.31252500 -29 99999.9990 134134.31242553 +18 1 1 +19 10 10 +20 100 100 +21 1000 
1000 +22 100000 10000 +23 0 0 +24 0 0 +25 0 0 +26 0 0 +27 0 0 +28 12313.2 134134.312525 +29 99999.999 134134.31242553 PREHOOK: query: DROP TABLE DECIMAL_TRAILING_txt PREHOOK: type: DROPTABLE PREHOOK: Input: default@decimal_trailing_txt Modified: hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/vector_decimal_udf.q.out Tue Nov 18 00:48:40 2014 @@ -94,7 +94,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### -8800 NULL -0.0000000000 +0 0 200 20 @@ -113,7 +113,7 @@ NULL -0.6 -0.66 -0.666 -2.0 +2 4 6.28 -2.24 @@ -121,15 +121,15 @@ NULL -2.244 2.24 2.244 -248.00 +248 250.4 -2510.98 6.28 6.28 -6.280 -2.0000000000 --2469135780.2469135780 -2469135780.2469135600 +6.28 +2 +-2469135780.246913578 +2469135780.24691356 PREHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key + value FROM DECIMAL_UDF @@ -174,7 +174,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### 0 NULL -0.0000000000 +0 0 200 20 @@ -193,7 +193,7 @@ NULL -0.3 -0.33 -0.333 -2.0 +2 4 6.14 -2.12 @@ -201,15 +201,15 @@ NULL -12.122 2.12 2.122 -248.00 +248 250.2 -2510.49 6.14 6.14 -7.140 -2.0000000000 --2469135780.1234567890 -2469135780.1234567800 +7.14 +2 +-2469135780.123456789 +2469135780.12345678 PREHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key + (value/2) FROM DECIMAL_UDF @@ -415,42 +415,42 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### 0 NULL -0.0000000000 0 0 0 0 -0.0 -0.00 0 0 0 0 -0.0 -0.00 -0.0 -0.00 -0.000 -0.0 -0.00 -0.000 -0.0 0 -0.00 -0.00 -0.00 -0.000 -0.00 -0.000 -0.00 -0.0 -0.00 -0.00 -0.00 -0.000 -0.0000000000 -0.0000000000 -0.0000000000 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 +0 PREHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key - value FROM DECIMAL_UDF @@ -495,7 +495,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### -8800 NULL -0.0000000000 +0 0 0 0 @@ -514,7 +514,7 @@ NULL -0.3 -0.33 -0.333 -0.0 +0 0 0.14 -0.12 @@ -522,15 +522,15 @@ NULL 9.878 0.12 0.122 -0.00 +0 0.2 -0.49 0.14 0.14 --0.860 -0.0000000000 --0.1234567890 -0.1234567800 +-0.86 +0 +-0.123456789 +0.12345678 PREHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key - (value/2) FROM DECIMAL_UDF @@ -736,7 +736,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### 19360000 NULL -0.00000000000000000000 +0 0 10000 100 @@ -755,7 +755,7 @@ NULL 0.09 0.1089 0.110889 -1.00 +1 4 9.8596 1.2544 @@ -763,13 +763,13 @@ NULL 1.258884 1.2544 1.258884 -15376.0000 +15376 15675.04 1576255.1401 9.8596 9.8596 -9.859600 -1.00000000000000000000 +9.8596 +1 NULL NULL PREHOOK: query: EXPLAIN SELECT key, value FROM DECIMAL_UDF where key * value > 0 @@ -823,7 +823,7 @@ POSTHOOK: Input: default@decimal_udf 200 200 20 20 2 2 -1.0 1 +1 1 2 2 3.14 3 -1.12 -1 @@ -831,15 +831,15 @@ POSTHOOK: Input: default@decimal_udf -1.122 -11 1.12 1 1.122 1 -124.00 124 
+124 124 125.2 125 -1255.49 -1255 3.14 3 3.14 3 -3.140 4 -1.0000000000 1 --1234567890.1234567890 -1234567890 -1234567890.1234567800 1234567890 +3.14 4 +1 1 +-1234567890.123456789 -1234567890 +1234567890.12345678 1234567890 PREHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key * value FROM DECIMAL_UDF @@ -884,26 +884,26 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### -19360000 NULL -0.0000000000 +0 0 10000 100 1 -0.0 -0.00 +0 +0 40000 400 4 0 -0.0 -0.00 -0.0 -0.00 -0.000 -0.0 -0.00 -0.000 -1.0 +0 +0 +0 +0 +0 +0 +0 +0 +1 4 9.42 1.12 @@ -911,15 +911,15 @@ NULL 12.342 1.12 1.122 -15376.00 -15650.0 +15376 +15650 1575639.95 9.42 9.42 -12.560 -1.0000000000 -1524157875171467887.5019052100 -1524157875171467876.3907942000 +12.56 +1 +1524157875171467887.50190521 +1524157875171467876.3907942 PREHOOK: query: EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key * (value/2) FROM DECIMAL_UDF @@ -1319,7 +1319,7 @@ POSTHOOK: Input: default@decimal_udf 0.785 1 1.0000000001 -1.000000000099999992710 +1.00000000009999999271 PREHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 PREHOOK: type: QUERY POSTHOOK: query: EXPLAIN SELECT key / (value/2) FROM DECIMAL_UDF WHERE value is not null and value <> 0 @@ -1514,7 +1514,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### 4400 NULL -0.0000000000 +0 0 100 10 @@ -1533,7 +1533,7 @@ NULL 0.3 0.33 0.333 -1.0 +1 2 3.14 1.12 @@ -1541,15 +1541,15 @@ NULL 1.122 1.12 1.122 -124.00 +124 125.2 1255.49 3.14 3.14 -3.140 -1.0000000000 -1234567890.1234567890 -1234567890.1234567800 +3.14 +1 +1234567890.123456789 +1234567890.12345678 PREHOOK: query: -- avg EXPLAIN SELECT value, sum(key) / count(key), avg(key), sum(key) FROM DECIMAL_UDF GROUP BY value ORDER BY value PREHOOK: type: QUERY @@ -1639,23 +1639,23 @@ POSTHOOK: query: SELECT value, sum(key) POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### --1234567890 -1234567890.123456789 -1234567890.123456789 -1234567890.1234567890 +-1234567890 -1234567890.123456789 -1234567890.123456789 -1234567890.123456789 -1255 -1255.49 -1255.49 -1255.49 -11 -1.122 -1.122 -1.122 -1 -1.12 -1.12 -2.24 -0 0.02538461538461538461538 0.02538461538462 0.3300000000 -1 1.0484 1.0484 5.2420000000 +0 0.02538461538461538461538 0.02538461538462 0.33 +1 1.0484 1.0484 5.242 2 2 2 4 3 3.14 3.14 9.42 -4 3.14 3.14 3.140 +4 3.14 3.14 3.14 10 10 10 10 20 20 20 20 100 100 100 100 -124 124 124 124.00 +124 124 124 124 125 125.2 125.2 125.2 200 200 200 200 4400 -4400 -4400 -4400 -1234567890 1234567890.12345678 1234567890.12345678 1234567890.1234567800 +1234567890 1234567890.12345678 1234567890.12345678 1234567890.12345678 PREHOOK: query: -- negative EXPLAIN SELECT -key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1702,7 +1702,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### 4400 NULL -0.0000000000 +0 0 -100 -10 @@ -1721,7 +1721,7 @@ NULL 0.3 0.33 0.333 --1.0 +-1 -2 -3.14 1.12 @@ -1729,15 +1729,15 @@ NULL 1.122 -1.12 -1.122 --124.00 +-124 -125.2 1255.49 -3.14 -3.14 --3.140 --1.0000000000 -1234567890.1234567890 --1234567890.1234567800 +-3.14 +-1 +1234567890.123456789 +-1234567890.12345678 PREHOOK: query: -- positive EXPLAIN SELECT +key FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -1771,7 +1771,7 @@ POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### -4400 NULL -0.0000000000 +0 
0 100 10 @@ -1790,7 +1790,7 @@ NULL -0.3 -0.33 -0.333 -1.0 +1 2 3.14 -1.12 @@ -1798,15 +1798,15 @@ NULL -1.122 1.12 1.122 -124.00 +124 125.2 -1255.49 3.14 3.14 -3.140 -1.0000000000 --1234567890.1234567890 -1234567890.1234567800 +3.14 +1 +-1234567890.123456789 +1234567890.12345678 PREHOOK: query: -- ceiling EXPlAIN SELECT CEIL(key) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -2015,42 +2015,42 @@ POSTHOOK: query: SELECT ROUND(key, 2) FR POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### --4400.00 +-4400 NULL -0.00 -0.00 -100.00 -10.00 -1.00 -0.10 +0 +0 +100 +10 +1 +0.1 0.01 -200.00 -20.00 -2.00 -0.00 -0.20 +200 +20 +2 +0 +0.2 0.02 -0.30 +0.3 0.33 0.33 --0.30 +-0.3 -0.33 -0.33 -1.00 -2.00 +1 +2 3.14 -1.12 -1.12 -1.12 1.12 1.12 -124.00 -125.20 +124 +125.2 -1255.49 3.14 3.14 3.14 -1.00 +1 -1234567890.12 1234567890.12 PREHOOK: query: -- power @@ -2184,38 +2184,38 @@ NULL NULL 1 1 -0.0 -0.00 -0.000 +0 +0 +0 1 1 0 NULL -0.0 -0.00 -0.10 -0.010 -0.0010 -0.10 -0.010 -0.0010 -0.0 0 -1.00 +0 +0.1 +0.01 +0.001 +0.1 +0.01 +0.001 +0 +0 +1 -0.12 -0.12 -0.122 0.44 0.439 -1.00 -1.0 +1 +1 -626.745 -1.00 -1.00 -1.000 -0.0000000000 +1 +1 +1 +0 -617283944.0617283945 -1.0000000000 +1 PREHOOK: query: -- stddev, var EXPLAIN SELECT value, stddev(key), variance(key) FROM DECIMAL_UDF GROUP BY value PREHOOK: type: QUERY @@ -2510,7 +2510,7 @@ POSTHOOK: query: SELECT MIN(key) FROM DE POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### --1234567890.1234567890 +-1234567890.123456789 PREHOOK: query: -- max EXPLAIN SELECT MAX(key) FROM DECIMAL_UDF PREHOOK: type: QUERY @@ -2574,7 +2574,7 @@ POSTHOOK: query: SELECT MAX(key) FROM DE POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_udf #### A masked pattern was here #### -1234567890.1234567800 +1234567890.12345678 PREHOOK: query: -- count EXPLAIN SELECT COUNT(key) FROM DECIMAL_UDF PREHOOK: type: QUERY Modified: hive/branches/spark/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out Tue Nov 18 00:48:40 2014 @@ -134,7 +134,7 @@ POSTHOOK: Input: default@decimal_test -1066226047 -9439.0 -5637.8891891892 -6752.515384615385 -5637.8891891892 -1065117869 2538.0 1515.9405405405 1815.646153846154 1515.9405405405 -1064949302 6454.0 3854.9567567568 4617.092307692308 3854.9567567568 --1063498122 -11480.0 -6856.9729729730 -8212.615384615387 -6856.9729729730 +-1063498122 -11480.0 -6856.972972973 -8212.615384615387 -6856.972972973 -1062973443 10541.0 6296.1108108108 7540.869230769231 6296.1108108108 -1061614989 -4234.0 -2528.9567567568 -3028.938461538462 -2528.9567567568 -1061057428 -1085.0 -648.0675675676 -776.1923076923077 -648.0675675676 @@ -142,14 +142,14 @@ POSTHOOK: Input: default@decimal_test -1059338191 7322.0 4373.4108108108 5238.046153846154 4373.4108108108 -1059047258 12452.0 7437.5459459459 8907.969230769231 7437.5459459459 -1056684111 13991.0 8356.7864864865 10008.946153846155 8356.7864864865 --1055945837 13690.0 8177.0 9793.615384615387 8177.0 +-1055945837 13690.0 8177 9793.615384615387 8177 -1055669248 2570.0 1535.0540540541 
1838.538461538462 1535.0540540541 -1055316250 -14990.0 -8953.4864864865 -10723.615384615385 -8953.4864864865 -1053385587 14504.0 8663.2 10375.938461538462 8663.2 -1053238077 -3704.0 -2212.3891891892 -2649.784615384616 -2212.3891891892 -1052745800 -12404.0 -7408.8756756757 -8873.630769230771 -7408.8756756757 -1052322972 -7433.0 -4439.7108108108 -5317.453846153847 -4439.7108108108 --1050684541 -8261.0 -4934.2729729730 -5909.792307692308 -4934.2729729730 +-1050684541 -8261.0 -4934.272972973 -5909.792307692308 -4934.272972973 -1050657303 -6999.0 -4180.4837837838 -5006.976923076923 -4180.4837837838 -1050165799 8634.0 5157.0648648649 6176.63076923077 5157.0648648649 -1048934049 -524.0 -312.9837837838 -374.86153846153854 -312.9837837838 @@ -160,12 +160,12 @@ POSTHOOK: Input: default@decimal_test -1045087657 -5865.0 -3503.1486486486 -4195.7307692307695 -3503.1486486486 -1044207190 5381.0 3214.0567567568 3849.4846153846156 3214.0567567568 -1044093617 -3422.0 -2043.9513513514 -2448.046153846154 -2043.9513513514 --1043573508 16216.0 9685.7729729730 11600.676923076924 9685.7729729730 +-1043573508 16216.0 9685.772972973 11600.676923076924 9685.772972973 -1043132597 12302.0 7347.9513513514 8800.66153846154 7347.9513513514 -1043082182 9180.0 5483.1891891892 6567.2307692307695 5483.1891891892 --1042805968 5133.0 3065.9270270270 3672.0692307692307 3065.9270270270 +-1042805968 5133.0 3065.927027027 3672.0692307692307 3065.927027027 -1042712895 9296.0 5552.4756756757 6650.215384615385 5552.4756756757 --1042396242 9583.0 5723.9000000000 6855.53076923077 5723.9000000000 +-1042396242 9583.0 5723.9 6855.53076923077 5723.9 -1041734429 -836.0 -499.3405405405 -598.0615384615385 -499.3405405405 -1041391389 -12970.0 -7746.9459459459 -9278.538461538463 -7746.9459459459 -1041252354 756.0 451.5567567568 540.8307692307692 451.5567567568 Modified: hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== Files hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/vectorized_ptf.q.out Tue Nov 18 00:48:40 2014 differ Modified: hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/windowing_decimal.q.out Tue Nov 18 00:48:40 2014 @@ -57,8 +57,8 @@ from part_dec POSTHOOK: type: QUERY POSTHOOK: Input: default@part_dec #### A masked pattern was here #### -Manufacturer#1 1173.15 1173.15 2346.30 -Manufacturer#1 1173.15 1173.15 2346.30 +Manufacturer#1 1173.15 1173.15 2346.3 +Manufacturer#1 1173.15 1173.15 2346.3 Manufacturer#1 1414.42 1173.15 3760.72 Manufacturer#1 1602.59 1173.15 5363.31 Manufacturer#1 1632.66 1173.15 6995.97 @@ -76,7 +76,7 @@ Manufacturer#3 1922.98 1190.27 7532.61 Manufacturer#4 1206.26 1206.26 1206.26 Manufacturer#4 1290.35 1206.26 2496.61 Manufacturer#4 1375.42 1206.26 3872.03 -Manufacturer#4 1620.67 1206.26 5492.70 +Manufacturer#4 1620.67 1206.26 5492.7 Manufacturer#4 
1844.92 1206.26 7337.62 Manufacturer#5 1018.1 1018.1 1018.1 Manufacturer#5 1464.48 1018.1 2482.58 @@ -97,8 +97,8 @@ from part_dec POSTHOOK: type: QUERY POSTHOOK: Input: default@part_dec #### A masked pattern was here #### -Manufacturer#1 1173.15 1173.15 2346.30 -Manufacturer#1 1173.15 1173.15 2346.30 +Manufacturer#1 1173.15 1173.15 2346.3 +Manufacturer#1 1173.15 1173.15 2346.3 Manufacturer#1 1414.42 1414.42 1414.42 Manufacturer#1 1602.59 1602.59 1602.59 Manufacturer#1 1632.66 1632.66 1632.66 Modified: hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/windowing_navfn.q.out Tue Nov 18 00:48:40 2014 @@ -277,13 +277,13 @@ POSTHOOK: Input: default@over10k 65536 98.42 65536 0.93 65536 83.48 -65536 75.70 +65536 75.7 65536 88.04 65536 94.09 65536 33.45 65536 44.41 65536 22.15 -65536 20.50 +65536 20.5 65536 58.86 65536 30.91 65536 74.47 @@ -300,9 +300,9 @@ POSTHOOK: Input: default@over10k 65536 80.26 65536 35.07 65536 95.88 -65536 30.60 +65536 30.6 65536 46.97 -65536 58.80 +65536 58.8 65536 5.72 65536 29.27 65536 62.25 @@ -326,7 +326,7 @@ POSTHOOK: Input: default@over10k 65537 35.86 65537 47.75 65537 1.12 -65537 52.90 +65537 52.9 65537 53.92 65537 43.45 65537 7.52 @@ -340,20 +340,20 @@ POSTHOOK: Input: default@over10k 65537 56.48 65537 83.21 65537 56.52 -65537 36.60 -65537 59.70 +65537 36.6 +65537 59.7 65537 80.14 -65537 66.30 +65537 66.3 65537 94.87 65537 40.92 -65537 25.20 +65537 25.2 65537 7.36 65538 NULL 65538 53.35 65538 54.64 65538 76.67 65538 15.17 -65538 1.20 +65538 1.2 65538 13.71 65538 81.59 65538 43.33 Modified: hive/branches/spark/ql/src/test/results/clientpositive/windowing_rank.q.out URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/windowing_rank.q.out?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/ql/src/test/results/clientpositive/windowing_rank.q.out (original) +++ hive/branches/spark/ql/src/test/results/clientpositive/windowing_rank.q.out Tue Nov 18 00:48:40 2014 @@ -508,16 +508,16 @@ where rnk = 1 limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@over10k #### A masked pattern was here #### -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 -2013-03-01 09:11:58.70307 0.50 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 +2013-03-01 09:11:58.70307 0.5 1 PREHOOK: query: select ts, dec, rnk from (select ts, dec, @@ -546,16 +546,16 @@ where dec = 89.5 limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@over10k #### A masked pattern was here #### -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 
09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 -2013-03-01 09:11:58.703124 89.50 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 +2013-03-01 09:11:58.703124 89.5 1 PREHOOK: query: select ts, dec, rnk from (select ts, dec, @@ -586,13 +586,13 @@ where rnk = 1 limit 10 POSTHOOK: type: QUERY POSTHOOK: Input: default@over10k #### A masked pattern was here #### -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 -2013-03-01 09:11:58.70307 37.30 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 +2013-03-01 09:11:58.70307 37.3 1 Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorException.java URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorException.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorException.java (original) +++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroObjectInspectorException.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.serde2.avro; /** Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original) +++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Tue Nov 18 00:48:40 2014 @@ -779,7 +779,7 @@ public class BinarySortableSerDe extends // get the scale factor to turn big decimal into a decimal < 1 int factor = dec.precision() - dec.scale(); - factor = sign != -1 ? factor : -factor; + factor = sign == 1 ? factor : -factor; // convert the absolute big decimal to string dec.scaleByPowerOfTen(Math.abs(dec.scale())); Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original) +++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Tue Nov 18 00:48:40 2014 @@ -951,9 +951,9 @@ public final class ObjectInspectorUtils if (childFieldsList1 == null && childFieldsList2 == null) { return true; - } - - if (childFieldsList1.size() != childFieldsList2.size()) { + } else if (childFieldsList1 == null || childFieldsList2 == null) { + return false; + } else if (childFieldsList1.size() != childFieldsList2.size()) { return false; } Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java (original) +++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java Tue Nov 18 00:48:40 2014 @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package org.apache.hadoop.hive.serde2.io; import com.google.code.tempusfugit.concurrency.annotations.*; Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Tue Nov 18 00:48:40 2014 @@ -218,6 +218,7 @@ public class HiveAuthFactory { throws TTransportException { InetSocketAddress serverAddress; if (hiveHost == null || hiveHost.isEmpty()) { + // Wildcard bind serverAddress = new InetSocketAddress(portNum); } else { serverAddress = new InetSocketAddress(hiveHost, portNum); @@ -226,25 +227,26 @@ public class HiveAuthFactory { } public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath, - String keyStorePassWord, List<String> sslVersionBlacklist) - throws TTransportException, UnknownHostException { + String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException, + UnknownHostException { TSSLTransportFactory.TSSLTransportParameters params = - new TSSLTransportFactory.TSSLTransportParameters(); + new TSSLTransportFactory.TSSLTransportParameters(); params.setKeyStore(keyStorePath, keyStorePassWord); - - InetAddress serverAddress; + InetSocketAddress serverAddress; if (hiveHost == null || hiveHost.isEmpty()) { - serverAddress = InetAddress.getLocalHost(); + // Wildcard bind + serverAddress = new InetSocketAddress(portNum); } else { - serverAddress = InetAddress.getByName(hiveHost); + serverAddress = new InetSocketAddress(hiveHost, portNum); } - TServerSocket thriftServerSocket = TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress, params); + TServerSocket thriftServerSocket = + TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params); if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) { List<String> sslVersionBlacklistLocal = new ArrayList<String>(); for (String sslVersion : sslVersionBlacklist) { sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase()); } - SSLServerSocket sslServerSocket = (SSLServerSocket)thriftServerSocket.getServerSocket(); + SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket(); List<String> enabledProtocols = new ArrayList<String>(); for (String protocol : sslServerSocket.getEnabledProtocols()) { if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) { @@ -254,7 +256,8 @@ public class HiveAuthFactory { } } sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0])); - LOG.info("SSL Server Socket Enabled Protocols: " + Arrays.toString(sslServerSocket.getEnabledProtocols())); + LOG.info("SSL Server Socket Enabled Protocols: " + + Arrays.toString(sslServerSocket.getEnabledProtocols())); } return thriftServerSocket; } Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- 
hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java Tue Nov 18 00:48:40 2014 @@ -18,13 +18,17 @@ package org.apache.hive.service.cli.operation; import java.io.CharArrayWriter; +import java.util.regex.Pattern; +import org.apache.hadoop.hive.ql.exec.Task; import org.apache.log4j.Layout; import org.apache.log4j.Logger; import org.apache.log4j.WriterAppender; import org.apache.log4j.spi.Filter; import org.apache.log4j.spi.LoggingEvent; +import com.google.common.base.Joiner; + /** * An Appender to divert logs from individual threads to the LogObject they belong to. */ @@ -33,20 +37,29 @@ public class LogDivertAppender extends W private final OperationManager operationManager; /** - * A log filter that exclude messages coming from the logger with the given name. - * We apply this filter on the Loggers used by the log diversion stuff, so that + * A log filter that filters messages coming from the logger with the given names. + * It be used as a white list filter or a black list filter. + * We apply black list filter on the Loggers used by the log diversion stuff, so that * they don't generate more logs for themselves when they process logs. + * White list filter is used for less verbose log collection */ - private static class NameExclusionFilter extends Filter { - private String excludeLoggerName = null; - - public NameExclusionFilter(String excludeLoggerName) { - this.excludeLoggerName = excludeLoggerName; + private static class NameFilter extends Filter { + private final Pattern namePattern; + private final boolean excludeMatches; + + public NameFilter(boolean isExclusionFilter, String [] loggerNames) { + this.excludeMatches = isExclusionFilter; + String matchRegex = Joiner.on("|").join(loggerNames); + this.namePattern = Pattern.compile(matchRegex); } @Override public int decide(LoggingEvent ev) { - if (ev.getLoggerName().equals(excludeLoggerName)) { + boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches(); + if (excludeMatches == isMatch) { + // Deny if this is black-list filter (excludeMatches = true) and it + // matched + // or if this is whitelist filter and it didn't match return Filter.DENY; } return Filter.NEUTRAL; @@ -56,21 +69,29 @@ public class LogDivertAppender extends W /** This is where the log message will go to */ private final CharArrayWriter writer = new CharArrayWriter(); - public LogDivertAppender(Layout layout, OperationManager operationManager) { + public LogDivertAppender(Layout layout, OperationManager operationManager, boolean isVerbose) { setLayout(layout); setWriter(writer); setName("LogDivertAppender"); this.operationManager = operationManager; - // Filter out messages coming from log processing classes, or we'll run an infinite loop. - addFilter(new NameExclusionFilter(LOG.getName())); - addFilter(new NameExclusionFilter(OperationLog.class.getName())); - addFilter(new NameExclusionFilter(OperationManager.class.getName())); + if (isVerbose) { + // Filter out messages coming from log processing classes, or we'll run an + // infinite loop. 
+ String[] exclLoggerNames = { LOG.getName(), OperationLog.class.getName(), + OperationManager.class.getName() }; + addFilter(new NameFilter(true, exclLoggerNames)); + } else { + // in non verbose mode, show only select logger messages + String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter", + "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName() }; + addFilter(new NameFilter(false, inclLoggerNames)); + } } /** - * Overrides WriterAppender.subAppend(), which does the real logging. - * No need to worry about concurrency since log4j calls this synchronously. + * Overrides WriterAppender.subAppend(), which does the real logging. No need + * to worry about concurrency since log4j calls this synchronously. */ @Override protected void subAppend(LoggingEvent event) { Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Tue Nov 18 00:48:40 2014 @@ -18,8 +18,8 @@ package org.apache.hive.service.cli.operation; -import java.util.Enumeration; import java.util.ArrayList; +import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -30,16 +30,26 @@ import org.apache.hadoop.hive.conf.HiveC import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hive.service.AbstractService; -import org.apache.hive.service.cli.*; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.RowSetFactory; +import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; -import org.apache.log4j.*; +import org.apache.log4j.Appender; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Layout; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; /** * OperationManager. * */ public class OperationManager extends AbstractService { - private static final String DEFAULT_LAYOUT_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"; private final Log LOG = LogFactory.getLog(OperationManager.class.getName()); private HiveConf hiveConf; @@ -54,7 +64,8 @@ public class OperationManager extends Ab public synchronized void init(HiveConf hiveConf) { this.hiveConf = hiveConf; if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) { - initOperationLogCapture(); + boolean isVerbose = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE); + initOperationLogCapture(isVerbose); } else { LOG.debug("Operation level logging is turned off"); } @@ -73,7 +84,7 @@ public class OperationManager extends Ab super.stop(); } - private void initOperationLogCapture() { + private void initOperationLogCapture(boolean isVerbose) { // There should be a ConsoleAppender. Copy its Layout. 
Logger root = Logger.getRootLogger(); Layout layout = null; @@ -87,13 +98,19 @@ public class OperationManager extends Ab } } - if (layout == null) { - layout = new PatternLayout(DEFAULT_LAYOUT_PATTERN); - LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern."); - } + final String VERBOSE_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"; + final String NONVERBOSE_PATTERN = "%-5p : %m%n"; + if (isVerbose) { + if (layout == null) { + layout = new PatternLayout(VERBOSE_PATTERN); + LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern."); + } + } else { + layout = new PatternLayout(NONVERBOSE_PATTERN); + } // Register another Appender (with the same layout) that talks to us. - Appender ap = new LogDivertAppender(layout, this); + Appender ap = new LogDivertAppender(layout, this, isVerbose); root.addAppender(ap); } Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java Tue Nov 18 00:48:40 2014 @@ -33,8 +33,13 @@ public class EmbeddedThriftBinaryCLIServ super(new CLIService(null)); isEmbedded = true; HiveConf.setLoadHiveServer2Config(true); - cliService.init(new HiveConf()); + } + + @Override + public synchronized void init(HiveConf hiveConf) { + cliService.init(hiveConf); cliService.start(); + super.init(hiveConf); } public ICLIService getService() { Modified: hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java Tue Nov 18 00:48:40 2014 @@ -55,7 +55,7 @@ public abstract class ThriftCLIService e protected static HiveAuthFactory hiveAuthFactory; protected int portNum; - protected InetAddress serverAddress; + protected InetAddress serverIPAddress; protected String hiveHost; protected TServer server; protected org.eclipse.jetty.server.Server httpServer; @@ -85,9 +85,9 @@ public abstract class ThriftCLIService e } try { if (hiveHost != null && !hiveHost.isEmpty()) { - serverAddress = InetAddress.getByName(hiveHost); + serverIPAddress = InetAddress.getByName(hiveHost); } else { - serverAddress = InetAddress.getLocalHost(); + serverIPAddress = InetAddress.getLocalHost(); } } catch (UnknownHostException e) { throw new ServiceException(e); @@ -153,8 +153,8 @@ public abstract class ThriftCLIService e return portNum; } - public InetAddress getServerAddress() { - return serverAddress; + public InetAddress getServerIPAddress() { + return serverIPAddress; } @Override Modified: hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java URL: 
http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java (original) +++ hive/branches/spark/service/src/java/org/apache/hive/service/server/HiveServer2.java Tue Nov 18 00:48:40 2014 @@ -250,10 +250,10 @@ public class HiveServer2 extends Composi } private String getServerInstanceURI(HiveConf hiveConf) throws Exception { - if ((thriftCLIService == null) || (thriftCLIService.getServerAddress() == null)) { + if ((thriftCLIService == null) || (thriftCLIService.getServerIPAddress() == null)) { throw new Exception("Unable to get the server address; it hasn't been initialized yet."); } - return thriftCLIService.getServerAddress().getHostAddress() + ":" + return thriftCLIService.getServerIPAddress().getHostName() + ":" + thriftCLIService.getPortNumber(); } Modified: hive/branches/spark/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java (original) +++ hive/branches/spark/service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java Tue Nov 18 00:48:40 2014 @@ -17,29 +17,38 @@ */ package org.apache.hive.service.cli.operation; -import org.junit.Assert; +import java.io.File; + import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hive.service.cli.*; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.FetchType; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient; import org.junit.After; +import org.junit.Assert; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; - /** * TestOperationLoggingAPI * Test the FetchResults of TFetchType.LOG in thrift level. 
*/ public class TestOperationLoggingAPI { - private HiveConf hiveConf = new HiveConf(); - private String tableName = "testOperationLoggingAPI_table"; + private static HiveConf hiveConf; + private final String tableName = "testOperationLoggingAPI_table"; private File dataFile; private ThriftCLIServiceClient client; private SessionHandle sessionHandle; - private String sql = "select * from " + tableName; - private String[] expectedLogs = { + private final String sql = "select * from " + tableName; + private final String[] expectedLogs = { "Parsing command", "Parse Completed", "Starting Semantic Analysis", @@ -47,6 +56,12 @@ public class TestOperationLoggingAPI { "Starting command" }; + @BeforeClass + public static void setUpBeforeClass() { + hiveConf = new HiveConf(); + hiveConf.setBoolean(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, true); + } + /** * Start embedded mode, open a session, and create a table for cases usage * @throws Exception @@ -247,7 +262,7 @@ public class TestOperationLoggingAPI { private void verifyFetchedLog(String logs) { for (String log : expectedLogs) { - Assert.assertTrue(logs.contains(log)); + Assert.assertTrue("Checking for presence of " + log, logs.contains(log)); } } } Modified: hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1640263&r1=1640262&r2=1640263&view=diff ============================================================================== --- hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original) +++ hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Nov 18 00:48:40 2014 @@ -552,7 +552,13 @@ public class Hadoop23Shims extends Hadoo FileStatus fileStatus = fs.getFileStatus(file); AclStatus aclStatus = null; if (isExtendedAclEnabled(conf)) { - aclStatus = fs.getAclStatus(file); + //Attempt extended Acl operations only if its enabled, but don't fail the operation regardless. + try { + aclStatus = fs.getAclStatus(file); + } catch (Exception e) { + LOG.info("Skipping ACL inheritance: File system for path " + file + " " + + "does not support ACLs but dfs.namenode.acls.enabled is set to true: " + e, e); + } } return new Hadoop23FileStatus(fileStatus, aclStatus); } @@ -568,19 +574,25 @@ public class Hadoop23Shims extends Hadoo run(fsShell, new String[]{"-chgrp", "-R", group, target.toString()}); if (isExtendedAclEnabled(conf)) { - AclStatus aclStatus = ((Hadoop23FileStatus) sourceStatus).getAclStatus(); - List<AclEntry> aclEntries = aclStatus.getEntries(); - removeBaseAclEntries(aclEntries); - - //the ACL api's also expect the tradition user/group/other permission in the form of ACL - FsPermission sourcePerm = sourceStatus.getFileStatus().getPermission(); - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.USER, sourcePerm.getUserAction())); - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.GROUP, sourcePerm.getGroupAction())); - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.OTHER, sourcePerm.getOtherAction())); - - //construct the -setfacl command - String aclEntry = Joiner.on(",").join(aclStatus.getEntries()); - run(fsShell, new String[]{"-setfacl", "-R", "--set", aclEntry, target.toString()}); + //Attempt extended Acl operations only if its enabled, but don't fail the operation regardless. 
+ try { + AclStatus aclStatus = ((Hadoop23FileStatus) sourceStatus).getAclStatus(); + List<AclEntry> aclEntries = aclStatus.getEntries(); + removeBaseAclEntries(aclEntries); + + //the ACL api's also expect the tradition user/group/other permission in the form of ACL + FsPermission sourcePerm = sourceStatus.getFileStatus().getPermission(); + aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.USER, sourcePerm.getUserAction())); + aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.GROUP, sourcePerm.getGroupAction())); + aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.OTHER, sourcePerm.getOtherAction())); + + //construct the -setfacl command + String aclEntry = Joiner.on(",").join(aclStatus.getEntries()); + run(fsShell, new String[]{"-setfacl", "-R", "--set", aclEntry, target.toString()}); + } catch (Exception e) { + LOG.info("Skipping ACL inheritance: File system for path " + target + " " + + "does not support ACLs but dfs.namenode.acls.enabled is set to true: " + e, e); + } } else { String permission = Integer.toString(sourceStatus.getFileStatus().getPermission().toShort(), 8); run(fsShell, new String[]{"-chmod", "-R", permission, target.toString()});
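
[To make the intent of the Hadoop23Shims hunks explicit: extended-ACL calls are now attempted only when ACLs are enabled, and any failure is logged and ignored rather than failing the copy. A rough, self-contained sketch of that fallback pattern follows; the class and method names are hypothetical and this is not the shim's exact code.]

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclStatus;

// Sketch of the "try ACLs, but never fail the operation" pattern from the diff above.
class AclFallbackSketch {
  private static final Log LOG = LogFactory.getLog(AclFallbackSketch.class);

  // Returns the file's ACLs when available, or null if ACLs are disabled or the
  // underlying file system rejects the call (e.g. ACLs not supported).
  static AclStatus tryGetAclStatus(FileSystem fs, Path file, boolean extendedAclEnabled) {
    if (!extendedAclEnabled) {
      return null;
    }
    try {
      return fs.getAclStatus(file);
    } catch (Exception e) {
      LOG.info("Skipping ACL inheritance for " + file + ": " + e, e);
      return null;
    }
  }
}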
