git commit: PHOENIX-938: Use higher priority queue for index updates to prevent deadlock
Repository: phoenix Updated Branches: refs/heads/4.0 2d1e512bb -> 463dabdcb PHOENIX-938: Use higher priority queue for index updates to prevent deadlock Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/463dabdc Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/463dabdc Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/463dabdc Branch: refs/heads/4.0 Commit: 463dabdcb9e3af7cc9182556c8e56fce0caca0de Parents: 2d1e512 Author: Jesse Yates Authored: Wed Jul 23 11:10:52 2014 -0700 Committer: Jesse Yates Committed: Wed Jul 23 11:11:23 2014 -0700 -- .../phoenix/end2end/index/IndexHandlerIT.java | 166 +++ .../hbase/ipc/PhoenixIndexRpcScheduler.java | 125 ++ .../phoenix/hbase/index/IndexQosCompat.java | 98 +++ .../index/IndexQosRpcControllerFactory.java | 86 ++ .../hbase/index/builder/IndexBuildManager.java | 1 - .../hbase/index/builder/IndexBuilder.java | 1 - .../ipc/PhoenixIndexRpcSchedulerFactory.java| 104 .../index/table/CoprocessorHTableFactory.java | 65 .../java/org/apache/phoenix/util/IndexUtil.java | 6 + .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java | 99 +++ .../PhoenixIndexRpcSchedulerFactoryTest.java| 105 pom.xml | 4 +- 12 files changed, 827 insertions(+), 33 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/463dabdc/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java new file mode 100644 index 000..a829ae1 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java @@ -0,0 +1,166 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CellScannable; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.ipc.DelegatingPayloadCarryingRpcController; +import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; +import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.phoenix.hbase.index.IndexQosRpcControllerFactory; +import org.apache.phoenix.hbase.index.TableName; +import org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; + +/** + * Comprehensive test that ensures we are adding custom index handlers + */ +public class IndexHandlerIT { + +public static class CountingIndexClientRpcFactory extends RpcControllerFactory { + 
+private IndexQosRpcControllerFactory delegate; + +public CountingIndexClientRpcFactory(Configuration conf) { +super(conf); +this.delegate = new IndexQosRpcControllerFactory(conf); +} + +@Override +public PayloadCarryingRpcController newController() { +PayloadCarryingRpcController controller = delegate.newController(); +return new CountingIndexClientRpcController(controller); +} + +@Override +public PayloadCarryingRpcController newController(CellScanner cellScanner) { +PayloadC
git commit: PHOENIX-938: Use higher priority queue for index updates to prevent deadlock
Repository: phoenix Updated Branches: refs/heads/master 36a41c86a -> 1954c717a PHOENIX-938: Use higher priority queue for index updates to prevent deadlock Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/1954c717 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/1954c717 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/1954c717 Branch: refs/heads/master Commit: 1954c717a12561bdc2184ba23c53afae3f900084 Parents: 36a41c8 Author: Jesse Yates Authored: Wed Jul 23 11:10:52 2014 -0700 Committer: Jesse Yates Committed: Wed Jul 23 11:10:52 2014 -0700 -- .../phoenix/end2end/index/IndexHandlerIT.java | 166 +++ .../hbase/ipc/PhoenixIndexRpcScheduler.java | 125 ++ .../phoenix/hbase/index/IndexQosCompat.java | 98 +++ .../index/IndexQosRpcControllerFactory.java | 86 ++ .../hbase/index/builder/BaseIndexBuilder.java | 2 - .../hbase/index/builder/IndexBuildManager.java | 1 - .../hbase/index/builder/IndexBuilder.java | 1 - .../ipc/PhoenixIndexRpcSchedulerFactory.java| 104 .../index/table/CoprocessorHTableFactory.java | 65 .../java/org/apache/phoenix/util/IndexUtil.java | 6 + .../hbase/ipc/PhoenixIndexRpcSchedulerTest.java | 99 +++ .../PhoenixIndexRpcSchedulerFactoryTest.java| 105 pom.xml | 4 +- 13 files changed, 827 insertions(+), 35 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/1954c717/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java new file mode 100644 index 000..a829ae1 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexHandlerIT.java @@ -0,0 +1,166 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CellScannable; +import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.client.Put; +import org.apache.hadoop.hbase.ipc.DelegatingPayloadCarryingRpcController; +import org.apache.hadoop.hbase.ipc.PayloadCarryingRpcController; +import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.phoenix.hbase.index.IndexQosRpcControllerFactory; +import org.apache.phoenix.hbase.index.TableName; +import org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; + +/** + * Comprehensive test that ensures we are adding custom index handlers + */ +public 
class IndexHandlerIT { + +public static class CountingIndexClientRpcFactory extends RpcControllerFactory { + +private IndexQosRpcControllerFactory delegate; + +public CountingIndexClientRpcFactory(Configuration conf) { +super(conf); +this.delegate = new IndexQosRpcControllerFactory(conf); +} + +@Override +public PayloadCarryingRpcController newController() { +PayloadCarryingRpcController controller = delegate.newController(); +return new CountingIndexClientRpcController(controller); +} + +@Override +public PayloadCarryingRpcController
svn commit: r1613476 - /phoenix/site/source/src/site/markdown/secondary_indexing.md
Author: jyates Date: Fri Jul 25 17:02:13 2014 New Revision: 1613476 URL: http://svn.apache.org/r1613476 Log: Adding docs for removal of mutable secondary index deadlock for 0.98.4+ Modified: phoenix/site/source/src/site/markdown/secondary_indexing.md Modified: phoenix/site/source/src/site/markdown/secondary_indexing.md URL: http://svn.apache.org/viewvc/phoenix/site/source/src/site/markdown/secondary_indexing.md?rev=1613476&r1=1613475&r2=1613476&view=diff == --- phoenix/site/source/src/site/markdown/secondary_indexing.md (original) +++ phoenix/site/source/src/site/markdown/secondary_indexing.md Fri Jul 25 17:02:13 2014 @@ -94,6 +94,7 @@ If the index tables are not setup correc Only mutable indexing requires special configuration options in the region server to run - phoenix ensures that they are setup correctly when you enable mutable indexing on the table; if the correct properties are not set, you will not be able to turn it on. You will need to add the following parameters to `hbase-site.xml`: + ``` hbase.regionserver.wal.codec @@ -103,6 +104,65 @@ You will need to add the following param This enables custom WAL edits to be written, ensuring proper writing/replay of the index updates. This codec supports the usual host of WALEdit options, most notably WALEdit compression. +### Advanced Setup - Removing Index Deadlocks (0.98.4+) + +Phoenix releases that include these changes (4.1+, 5.0.0+) are still backwards compatible with older versions of phoenix (to the extent that they are semantically compatible) as well as with older versions of HBase (0.98.1-0.98.3). + +As of HBase 0.98.4 we can finally remove the chance of index deadlocks. In HBase you can tune the number of RPC threads to match client writes + index writes, but there is still a chance you could have a deadlock in an unlucky scenario (i.e. Client A -> Server A, Client B -> Server B, each taking the last RPC thread. 
Then each server attempts to make an index update to the other, Server A -> Server B, and vice versa, but they can't as there are no more available RPC threads). + +As of [PHOENIX-938](https://issues.apache.org/jira/browse/PHOENIX-938) and [HBASE-11513](https://issues.apache.org/jira/browse/HBASE-11513) we can remove these deadlocks by providing a different set of RPC handlers for index updates by giving index updates their own 'rpc priority' and handling the priorities via a custom Phoenix RPC Handler. + +The properties you need to set to enable this are + + Server Side + +``` + + hbase.region.server.rpc.scheduler.factory.class + org.apache.phoenix.hbase.index.ipc.PhoenixIndexRpcSchedulerFactory + Factory to create the Phoenix RPC Scheduler that knows to put index updates into index queues + +``` + +After adding these settings to your hbase-site.xml, you just need to do a rolling restart of your cluster. + + +Note that having the configs on both client and server side will not impact correctness or performance. + + Tuning + +By default, index priority range is between (1000, 1050]. Higher priorities within the index range, at this time, do not mean updates are processed sooner. However, we reserve this range to provide that possibility in the future. 
+ +You can specify this range however to suit your individual cluster requirements by adjusting the following parameters + +``` + + org.apache.phoenix.regionserver.index.priority.min + 1050 + Value to specify the bottom (inclusive) of the range in which index priority may lie + + + org.apache.phoenix.regionserver.index.priority.max + 1050 + Value to specify the top (exclusive) of the range in which index priority may lie + +``` + +The number of RPC Handler Threads can be specified via: + +``` + + org.apache.phoenix.regionserver.index.handler.count + 30 + Number of threads to use when serving index write requests + +``` + +Though the actual number of threads is dictated by the Max(number of call queues, handler count), where the number of call queues is determined by standard HBase configuration (see below). + + +To further tune the queues, you can adjust the standard rpc queue length parameters (currently, there are no special knobs for the index queues), specifically "ipc.server.max.callqueue.length" and "ipc.server.callqueue.handler.factor". See the [HBase Reference Guide](http://hbase.apache.org/book.html) for more details. + ## Tuning Out of the box, indexing is pretty fast. However, to optimize for your particular environment and workload, there are several properties you can tune.
svn commit: r1613496 - in /phoenix: phoenix-docs/src/main/org/h2/ phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/ phoenix-docs/src/main/org/h2/tools/ site/publish/
Author: jyates Date: Fri Jul 25 19:03:23 2014 New Revision: 1613496 URL: http://svn.apache.org/r1613496 Log: Actually building the site for index updates Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java phoenix/site/publish/secondary_indexing.html Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/Driver.java?rev=1613496&r1=1613495&r2=1613496&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/Driver.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/Driver.java Fri Jul 25 19:03:23 2014 @@ -17,7 +17,7 @@ import org.h2.message.DbException; import org.h2.message.TraceSystem; import org.h2.upgrade.DbUpgrade; -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.logging.Logger; //*/ @@ -138,7 +138,7 @@ public class Driver implements java.sql. /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public Logger getParentLogger() { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java?rev=1613496&r1=1613495&r2=1613496&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java Fri Jul 25 19:03:23 2014 @@ -1440,7 +1440,7 @@ public class JdbcCallableStatement exten * @param parameterIndex the parameter index (1, 2, ...) 
* @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(int parameterIndex, Class type) { return null; } @@ -1452,7 +1452,7 @@ public class JdbcCallableStatement exten * @param parameterName the parameter name * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(String parameterName, Class type) { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java?rev=1613496&r1=1613495&r2=1613496&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java Fri Jul 25 19:03:23 2014 @@ -49,7 +49,7 @@ import java.sql.SQLXML; import java.sql.SQLClientInfoException; //*/ -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.concurrent.Executor; //*/ @@ -1678,7 +1678,7 @@ public class JdbcConnection extends Trac * * @param schema the schema */ -/*## Java 1.7 ## +//## Java 1.7 ## public void setSchema(String schema) { // not supported } @@ -1687,7 +1687,7 @@ public class JdbcConnection extends Trac /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public String getSchema() { return null; } @@ -1698,7 +1698,7 @@ public class JdbcConnection extends Trac * * @param executor the executor used by this method */ -/*## Java 1.7 ## +//## Java 1.7 ## public void abort(Executor executor) { // not supported } @@ -1710,7 +1710,7 @@ public class JdbcConnection extends Trac * @param executor the executor used by this method * @param milliseconds the TCP connection timeout */ -/*## Java 1.7 ## +//## Java 1.7 ## public void setNetworkTimeout(Executor executor, int milliseconds) { // not supported } @@ -1719,7 +1719,7 @@ public class JdbcConnection extends Trac /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public int getNetworkTimeout() { return 0; } Modified: 
phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java?rev=1613496&r1=1613495&r2=1613496&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaDa
[2/3] PHOENIX-177: Collect usage and performance metrics
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f46c10/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java index e24591d..b3bec6e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.regionserver.ScanType; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.hbase.index.builder.IndexBuildManager; import org.apache.phoenix.hbase.index.builder.IndexBuilder; @@ -64,6 +65,11 @@ import org.apache.phoenix.hbase.index.write.IndexWriter; import org.apache.phoenix.hbase.index.write.recovery.PerRegionIndexWriteCache; import org.apache.phoenix.hbase.index.write.recovery.StoreFailuresInCachePolicy; import org.apache.phoenix.hbase.index.write.recovery.TrackingParallelWriterIndexCommitter; +import org.apache.phoenix.trace.TracingCompat; +import org.apache.phoenix.trace.util.NullSpan; +import org.apache.phoenix.trace.util.Tracing; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.Trace; import com.google.common.collect.Multimap; @@ -134,10 +140,18 @@ public class Indexer extends BaseRegionObserver { private static final int INDEX_WAL_COMPRESSION_MINIMUM_SUPPORTED_VERSION = VersionUtil .encodeVersion("0.94.9"); +/** + * Raw configuration, for tracing. 
Coprocessors generally will get a subset configuration (if + * they are on a per-table basis), so we need the raw one from the server, so we can get the + * actual configuration keys + */ +private Configuration rawConf; + @Override public void start(CoprocessorEnvironment e) throws IOException { try { final RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e; +this.rawConf = env.getRegionServerServices().getConfiguration(); String serverName = env.getRegionServerServices().getServerName().getServerName(); if (env.getConfiguration().getBoolean(CHECK_VERSION_CONF_KEY, true)) { // make sure the right version <-> combinations are allowed. @@ -312,12 +326,24 @@ public class Indexer extends BaseRegionObserver { // don't worry which one we get WALEdit edit = miniBatchOp.getWalEdit(0); +// get the current span, or just use a null-span to avoid a bunch of if statements +Span current = Trace.startSpan("Starting to build index updates").getSpan(); +if (current == null) { +current = NullSpan.INSTANCE; +} + // get the index updates for all elements in this batch Collection> indexUpdates = this.builder.getIndexUpdate(miniBatchOp, mutations.values()); +current.addTimelineAnnotation("Built index updates, doing preStep"); +TracingCompat.addAnnotation(current, "index update count", indexUpdates.size()); + // write them, either to WAL or the index tables doPre(indexUpdates, edit, durability); + +// close the span +current.stop(); } private class MultiMutation extends Mutation { @@ -458,16 +484,24 @@ public class Indexer extends BaseRegionObserver { return; } +// get the current span, or just use a null-span to avoid a bunch of if statements +Span current = Trace.startSpan("Completing index writes").getSpan(); +if (current == null) { +current = NullSpan.INSTANCE; +} + // there is a little bit of excess here- we iterate all the non-indexed kvs for this check first // and then do it again later when getting out the index updates. 
This should be pretty minor // though, compared to the rest of the runtime IndexedKeyValue ikv = getFirstIndexedKeyValue(edit); + /* * early exit - we have nothing to write, so we don't need to do anything else. NOTE: we don't * release the WAL Rolling lock (INDEX_UPDATE_LOCK) since we never take it in doPre if there are * no index updates. */ if (ikv == null) { +current.stop(); return; } @@ -483,6 +517,7 @@ public class Indexer extends BaseRegionObserver { // references originally - therefore, we just pass in a null factory here and use the ones // already specified on each reference try { +current.addTimelineAnnotation("Actually doing index update for first time"); writer.writeAndKillYourselfOnFailure(indexUpdates); } finally { // With a custom kill policy, we
[1/3] PHOENIX-177: Collect usage and performance metrics
Repository: phoenix Updated Branches: refs/heads/master 9185f760b -> b7f46c105 http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f46c10/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java new file mode 100644 index 000..b6bc75d --- /dev/null +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.trace; + +import org.apache.phoenix.metrics.MetricsWriter; + +/** + * Marker interface for a MetricsWriter that can be registered to the current metrics system. The + * writer should convert from the metrics information it receives from the metrics system to Phoenix + * records that the MetricsWriter can read (and subsequently write). 
+ */ +public interface TestableMetricsWriter { + +public void setWriterForTesting(MetricsWriter writer); +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f46c10/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java new file mode 100644 index 000..6ec12de --- /dev/null +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.trace; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.CompatibilityFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.phoenix.metrics.MetricsWriter; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.SpanReceiver; + +/** + * Utilities for tracing that are common among the compatibility and core classes. 
+ */ +public class TracingCompat { + +private static final Log LOG = LogFactory.getLog(TracingCompat.class); + +/** + * @return a new SpanReceiver that will write to the correct metrics system + */ +public static SpanReceiver newTraceMetricSource() { +return CompatibilityFactory.getInstance(PhoenixSpanReceiver.class); +} + +public static final String DEFAULT_STATS_TABLE_NAME = "PHOENIX.TRACING_STATS"; + +/** + * Configuration key to overwrite the tablename that should be used as the target table + */ +public static final String TARGET_TABLE_CONF_KEY = +"org.apache.phoenix._internal.trace.tablename"; + +public static final String METRIC_SOURCE_KEY = "phoenix."; + +/** Set context to enable filtering */ +public static final String METRICS_CONTEXT = "tracing"; + +public static void addAnnotation(Span span, String message, int value) { +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(value)); +} + +public static Pair readAnnotation(byte[] key, byte[] value) { +return new Pair(new String(key), Integer.toString(Bytes
[3/3] git commit: PHOENIX-177: Collect usage and performance metrics
PHOENIX-177: Collect usage and performance metrics Add basic Dapper-like tracing (using Cloudera's HTrace library) to phoenix requests. This is the basic infrastructure to support more holistic, non-profiler based analysis. This patch includes, among other things, the infrastructure to use HTrace, async-tracing handling via the Hadoop metrics2 framework, and trace read/write to a phoenix table. Currently, do NOT support Hadoop1 (though does work against Hadoop1). Default builds to hadoop2, rather than hadoop1 (particularly as hadoop1 is now a second-class citizen). Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e8def027 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e8def027 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e8def027 Branch: refs/heads/4.0 Commit: e8def027a9c60e978ac79acbc6c71979e7478a43 Parents: 083c5f2 Author: Jesse Yates Authored: Fri Jun 6 16:11:32 2014 -0700 Committer: Jesse Yates Committed: Mon Jul 28 06:38:20 2014 -0700 -- phoenix-core/pom.xml| 47 +-- .../apache/phoenix/trace/BaseTracingTestIT.java | 117 ++ .../phoenix/trace/DelegatingConnection.java | 328 +++ .../phoenix/trace/DisableableMetricsWriter.java | 83 .../trace/Hadoop1TracingTestEnabler.java| 86 .../apache/phoenix/trace/PhoenixMetricImpl.java | 44 ++ .../phoenix/trace/PhoenixMetricRecordImpl.java | 71 .../trace/PhoenixTableMetricsWriterIT.java | 119 ++ .../apache/phoenix/trace/PhoenixTagImpl.java| 52 +++ .../phoenix/trace/PhoenixTracingEndToEndIT.java | 401 +++ .../apache/phoenix/trace/TraceReaderTest.java | 181 + .../org/apache/phoenix/call/CallRunner.java | 66 +++ .../org/apache/phoenix/call/CallWrapper.java| 29 ++ .../coprocessor/BaseScannerRegionObserver.java | 36 +- .../coprocessor/DelegateRegionScanner.java | 78 .../apache/phoenix/execute/BasicQueryPlan.java | 17 +- .../apache/phoenix/execute/MutationState.java | 24 +- .../org/apache/phoenix/hbase/index/Indexer.java | 39 ++ 
.../phoenix/iterate/ParallelIterators.java | 5 +- .../apache/phoenix/jdbc/PhoenixConnection.java | 38 +- .../apache/phoenix/jdbc/PhoenixStatement.java | 25 +- .../trace/PhoenixTableMetricsWriter.java| 255 .../org/apache/phoenix/trace/TraceReader.java | 375 + .../apache/phoenix/trace/TracingIterator.java | 58 +++ .../trace/util/ConfigurationAdapter.java| 56 +++ .../org/apache/phoenix/trace/util/NullSpan.java | 112 ++ .../org/apache/phoenix/trace/util/Tracing.java | 282 + .../phoenix/util/PhoenixContextExecutor.java| 23 ++ .../java/org/apache/phoenix/util/QueryUtil.java | 29 ++ .../test/resources/hadoop-metrics2.properties | 25 ++ .../src/test/resources/log4j.properties | 4 +- phoenix-hadoop-compat/pom.xml | 29 +- .../org/apache/phoenix/metrics/MetricInfo.java | 51 +++ .../org/apache/phoenix/metrics/Metrics.java | 39 ++ .../apache/phoenix/metrics/MetricsManager.java | 58 +++ .../apache/phoenix/metrics/MetricsWriter.java | 31 ++ .../phoenix/metrics/PhoenixAbstractMetric.java | 30 ++ .../phoenix/metrics/PhoenixMetricTag.java | 27 ++ .../phoenix/metrics/PhoenixMetricsRecord.java | 35 ++ .../phoenix/trace/PhoenixSpanReceiver.java | 26 ++ .../phoenix/trace/TestableMetricsWriter.java| 30 ++ .../org/apache/phoenix/trace/TracingCompat.java | 94 + .../org/apache/phoenix/metrics/LoggingSink.java | 56 +++ .../phoenix/metrics/TracingTestCompat.java | 45 +++ phoenix-hadoop2-compat/pom.xml | 45 +++ .../phoenix/metrics/MetricsManagerImpl.java | 71 .../apache/phoenix/trace/MetricsInfoImpl.java | 63 +++ .../phoenix/trace/PhoenixMetricsWriter.java | 176 .../apache/phoenix/trace/TraceMetricSource.java | 192 + .../org.apache.phoenix.metrics.MetricsManager | 1 + ...org.apache.phoenix.trace.PhoenixSpanReceiver | 1 + ...g.apache.phoenix.trace.TestableMetricsWriter | 1 + .../metrics2/impl/ExposedMetricCounterLong.java | 35 ++ .../metrics2/impl/ExposedMetricsRecordImpl.java | 43 ++ .../metrics2/lib/ExposedMetricsInfoImpl.java| 32 ++ .../phoenix/trace/PhoenixMetricsWriterTest.java | 142 +++ 
.../org/apache/phoenix/trace/TracingTest.java | 34 ++ pom.xml | 40 +- 58 files changed, 4477 insertions(+), 55 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8def027/phoenix-core/pom.xml -- diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml index 5f55e29..af6dcb6 100644 --- a/phoe
[2/3] PHOENIX-177: Collect usage and performance metrics
http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8def027/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java index e24591d..b3bec6e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/Indexer.java @@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.regionserver.ScanType; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.phoenix.hbase.index.builder.IndexBuildManager; import org.apache.phoenix.hbase.index.builder.IndexBuilder; @@ -64,6 +65,11 @@ import org.apache.phoenix.hbase.index.write.IndexWriter; import org.apache.phoenix.hbase.index.write.recovery.PerRegionIndexWriteCache; import org.apache.phoenix.hbase.index.write.recovery.StoreFailuresInCachePolicy; import org.apache.phoenix.hbase.index.write.recovery.TrackingParallelWriterIndexCommitter; +import org.apache.phoenix.trace.TracingCompat; +import org.apache.phoenix.trace.util.NullSpan; +import org.apache.phoenix.trace.util.Tracing; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.Trace; import com.google.common.collect.Multimap; @@ -134,10 +140,18 @@ public class Indexer extends BaseRegionObserver { private static final int INDEX_WAL_COMPRESSION_MINIMUM_SUPPORTED_VERSION = VersionUtil .encodeVersion("0.94.9"); +/** + * Raw configuration, for tracing. 
Coprocessors generally will get a subset configuration (if + * they are on a per-table basis), so we need the raw one from the server, so we can get the + * actual configuration keys + */ +private Configuration rawConf; + @Override public void start(CoprocessorEnvironment e) throws IOException { try { final RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e; +this.rawConf = env.getRegionServerServices().getConfiguration(); String serverName = env.getRegionServerServices().getServerName().getServerName(); if (env.getConfiguration().getBoolean(CHECK_VERSION_CONF_KEY, true)) { // make sure the right version <-> combinations are allowed. @@ -312,12 +326,24 @@ public class Indexer extends BaseRegionObserver { // don't worry which one we get WALEdit edit = miniBatchOp.getWalEdit(0); +// get the current span, or just use a null-span to avoid a bunch of if statements +Span current = Trace.startSpan("Starting to build index updates").getSpan(); +if (current == null) { +current = NullSpan.INSTANCE; +} + // get the index updates for all elements in this batch Collection> indexUpdates = this.builder.getIndexUpdate(miniBatchOp, mutations.values()); +current.addTimelineAnnotation("Built index updates, doing preStep"); +TracingCompat.addAnnotation(current, "index update count", indexUpdates.size()); + // write them, either to WAL or the index tables doPre(indexUpdates, edit, durability); + +// close the span +current.stop(); } private class MultiMutation extends Mutation { @@ -458,16 +484,24 @@ public class Indexer extends BaseRegionObserver { return; } +// get the current span, or just use a null-span to avoid a bunch of if statements +Span current = Trace.startSpan("Completing index writes").getSpan(); +if (current == null) { +current = NullSpan.INSTANCE; +} + // there is a little bit of excess here- we iterate all the non-indexed kvs for this check first // and then do it again later when getting out the index updates. 
This should be pretty minor // though, compared to the rest of the runtime IndexedKeyValue ikv = getFirstIndexedKeyValue(edit); + /* * early exit - we have nothing to write, so we don't need to do anything else. NOTE: we don't * release the WAL Rolling lock (INDEX_UPDATE_LOCK) since we never take it in doPre if there are * no index updates. */ if (ikv == null) { +current.stop(); return; } @@ -483,6 +517,7 @@ public class Indexer extends BaseRegionObserver { // references originally - therefore, we just pass in a null factory here and use the ones // already specified on each reference try { +current.addTimelineAnnotation("Actually doing index update for first time"); writer.writeAndKillYourselfOnFailure(indexUpdates); } finally { // With a custom kill policy, we
[1/3] PHOENIX-177: Collect usage and performance metrics
Repository: phoenix Updated Branches: refs/heads/4.0 083c5f2d4 -> e8def027a http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8def027/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java new file mode 100644 index 000..b6bc75d --- /dev/null +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TestableMetricsWriter.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.trace; + +import org.apache.phoenix.metrics.MetricsWriter; + +/** + * Marker interface for a MetricsWriter that can be registered to the current metrics system. The + * writer should convert from the metrics information it receives from the metrics system to Phoenix + * records that the MetricsWriter can read (and subsequently write). 
+ */ +public interface TestableMetricsWriter { + +public void setWriterForTesting(MetricsWriter writer); +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/e8def027/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java new file mode 100644 index 000..6ec12de --- /dev/null +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.trace; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.CompatibilityFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.phoenix.metrics.MetricsWriter; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.SpanReceiver; + +/** + * Utilities for tracing that are common among the compatibility and core classes. 
+ */ +public class TracingCompat { + +private static final Log LOG = LogFactory.getLog(TracingCompat.class); + +/** + * @return a new SpanReceiver that will write to the correct metrics system + */ +public static SpanReceiver newTraceMetricSource() { +return CompatibilityFactory.getInstance(PhoenixSpanReceiver.class); +} + +public static final String DEFAULT_STATS_TABLE_NAME = "PHOENIX.TRACING_STATS"; + +/** + * Configuration key to overwrite the tablename that should be used as the target table + */ +public static final String TARGET_TABLE_CONF_KEY = +"org.apache.phoenix._internal.trace.tablename"; + +public static final String METRIC_SOURCE_KEY = "phoenix."; + +/** Set context to enable filtering */ +public static final String METRICS_CONTEXT = "tracing"; + +public static void addAnnotation(Span span, String message, int value) { +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(value)); +} + +public static Pair readAnnotation(byte[] key, byte[] value) { +return new Pair(new String(key), Integer.toString(Bytes.to
[3/3] git commit: PHOENIX-177: Collect usage and performance metrics
PHOENIX-177: Collect usage and performance metrics Add basic Dapper-like tracing (using Cloudera's HTrace library) to phoenix requests. This is the basic infrastructure to support more holistic, non-profiler based analysis. This patch includes, among other things, the infrastructure to use HTrace, async-tracing handling via the Hadoop metrics2 framework, and trace read/write to a phoenix table. Currently, do NOT support Hadoop1 (though does work against Hadoop1). Default builds to hadoop2, rather than hadoop1 (particularly as hadoop1 is now a second-class citizen). Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b7f46c10 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b7f46c10 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b7f46c10 Branch: refs/heads/master Commit: b7f46c1051de3e23630dccb82677a0a16985f27c Parents: 9185f76 Author: Jesse Yates Authored: Fri Jun 6 16:11:32 2014 -0700 Committer: Jesse Yates Committed: Mon Jul 28 06:37:49 2014 -0700 -- phoenix-core/pom.xml| 47 +-- .../apache/phoenix/trace/BaseTracingTestIT.java | 117 ++ .../phoenix/trace/DelegatingConnection.java | 328 +++ .../phoenix/trace/DisableableMetricsWriter.java | 83 .../trace/Hadoop1TracingTestEnabler.java| 86 .../apache/phoenix/trace/PhoenixMetricImpl.java | 44 ++ .../phoenix/trace/PhoenixMetricRecordImpl.java | 71 .../trace/PhoenixTableMetricsWriterIT.java | 119 ++ .../apache/phoenix/trace/PhoenixTagImpl.java| 52 +++ .../phoenix/trace/PhoenixTracingEndToEndIT.java | 401 +++ .../apache/phoenix/trace/TraceReaderTest.java | 181 + .../org/apache/phoenix/call/CallRunner.java | 66 +++ .../org/apache/phoenix/call/CallWrapper.java| 29 ++ .../coprocessor/BaseScannerRegionObserver.java | 36 +- .../coprocessor/DelegateRegionScanner.java | 78 .../apache/phoenix/execute/BasicQueryPlan.java | 17 +- .../apache/phoenix/execute/MutationState.java | 24 +- .../org/apache/phoenix/hbase/index/Indexer.java | 39 ++ 
.../phoenix/iterate/ParallelIterators.java | 5 +- .../apache/phoenix/jdbc/PhoenixConnection.java | 38 +- .../apache/phoenix/jdbc/PhoenixStatement.java | 25 +- .../trace/PhoenixTableMetricsWriter.java| 255 .../org/apache/phoenix/trace/TraceReader.java | 375 + .../apache/phoenix/trace/TracingIterator.java | 58 +++ .../trace/util/ConfigurationAdapter.java| 56 +++ .../org/apache/phoenix/trace/util/NullSpan.java | 112 ++ .../org/apache/phoenix/trace/util/Tracing.java | 282 + .../phoenix/util/PhoenixContextExecutor.java| 23 ++ .../java/org/apache/phoenix/util/QueryUtil.java | 29 ++ .../test/resources/hadoop-metrics2.properties | 25 ++ .../src/test/resources/log4j.properties | 4 +- phoenix-hadoop-compat/pom.xml | 31 +- .../org/apache/phoenix/metrics/MetricInfo.java | 51 +++ .../org/apache/phoenix/metrics/Metrics.java | 39 ++ .../apache/phoenix/metrics/MetricsManager.java | 58 +++ .../apache/phoenix/metrics/MetricsWriter.java | 31 ++ .../phoenix/metrics/PhoenixAbstractMetric.java | 30 ++ .../phoenix/metrics/PhoenixMetricTag.java | 27 ++ .../phoenix/metrics/PhoenixMetricsRecord.java | 35 ++ .../phoenix/trace/PhoenixSpanReceiver.java | 26 ++ .../phoenix/trace/TestableMetricsWriter.java| 30 ++ .../org/apache/phoenix/trace/TracingCompat.java | 94 + .../org/apache/phoenix/metrics/LoggingSink.java | 56 +++ .../phoenix/metrics/TracingTestCompat.java | 45 +++ phoenix-hadoop2-compat/pom.xml | 47 ++- .../phoenix/metrics/MetricsManagerImpl.java | 71 .../apache/phoenix/trace/MetricsInfoImpl.java | 63 +++ .../phoenix/trace/PhoenixMetricsWriter.java | 176 .../apache/phoenix/trace/TraceMetricSource.java | 192 + .../org.apache.phoenix.metrics.MetricsManager | 1 + ...org.apache.phoenix.trace.PhoenixSpanReceiver | 1 + ...g.apache.phoenix.trace.TestableMetricsWriter | 1 + .../metrics2/impl/ExposedMetricCounterLong.java | 35 ++ .../metrics2/impl/ExposedMetricsRecordImpl.java | 43 ++ .../metrics2/lib/ExposedMetricsInfoImpl.java| 32 ++ .../phoenix/trace/PhoenixMetricsWriterTest.java | 142 +++ 
.../org/apache/phoenix/trace/TracingTest.java | 34 ++ pom.xml | 40 +- 58 files changed, 4479 insertions(+), 57 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b7f46c10/phoenix-core/pom.xml -- diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml index f5c3ace..cfdee95 100644 --- a/p
svn commit: r1618075 [2/2] - in /phoenix: phoenix-docs/src/main/org/h2/ phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/ phoenix-docs/src/main/org/h2/tools/ site/publish/ site/pu
Modified: phoenix/site/publish/roadmap.html URL: http://svn.apache.org/viewvc/phoenix/site/publish/roadmap.html?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/site/publish/roadmap.html (original) +++ phoenix/site/publish/roadmap.html Thu Aug 14 22:21:43 2014 @@ -1,7 +1,7 @@ @@ -88,6 +88,7 @@ Dynamic Columns Skip Scan Bulk Loading + Request Tracing Amazon EMR Support Apache Flume Plugin @@ -280,6 +281,9 @@ Bulk Loading + Request Tracing + + Modified: phoenix/site/publish/salted.html URL: http://svn.apache.org/viewvc/phoenix/site/publish/salted.html?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/site/publish/salted.html (original) +++ phoenix/site/publish/salted.html Thu Aug 14 22:21:43 2014 @@ -1,7 +1,7 @@ @@ -88,6 +88,7 @@ Dynamic Columns Skip Scan Bulk Loading + Request Tracing Amazon EMR Support Apache Flume Plugin @@ -267,6 +268,9 @@ Bulk Loading + Request Tracing + + Modified: phoenix/site/publish/secondary_indexing.html URL: http://svn.apache.org/viewvc/phoenix/site/publish/secondary_indexing.html?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/site/publish/secondary_indexing.html (original) +++ phoenix/site/publish/secondary_indexing.html Thu Aug 14 22:21:43 2014 @@ -1,7 +1,7 @@ @@ -88,6 +88,7 @@ Dynamic Columns Skip Scan Bulk Loading + Request Tracing Amazon EMR Support Apache Flume Plugin @@ -454,6 +455,9 @@ Bulk Loading + Request Tracing + + Modified: phoenix/site/publish/sequences.html URL: http://svn.apache.org/viewvc/phoenix/site/publish/sequences.html?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/site/publish/sequences.html (original) +++ phoenix/site/publish/sequences.html Thu Aug 14 22:21:43 2014 @@ -1,7 +1,7 @@ @@ -88,6 +88,7 @@
svn commit: r1618075 [1/2] - in /phoenix: phoenix-docs/src/main/org/h2/ phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/ phoenix-docs/src/main/org/h2/tools/ site/publish/ site/pu
Author: jyates Date: Thu Aug 14 22:21:43 2014 New Revision: 1618075 URL: http://svn.apache.org/r1618075 Log: Adding docs for tracing Added: phoenix/site/publish/tracing.html phoenix/site/source/src/site/markdown/tracing.md Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java phoenix/site/publish/Phoenix-in-15-minutes-or-less.html phoenix/site/publish/array_type.html phoenix/site/publish/building.html phoenix/site/publish/building_website.html phoenix/site/publish/bulk_dataload.html phoenix/site/publish/contributing.html phoenix/site/publish/download.html phoenix/site/publish/dynamic_columns.html phoenix/site/publish/faq.html phoenix/site/publish/flume.html phoenix/site/publish/index.html phoenix/site/publish/issues.html phoenix/site/publish/joins.html phoenix/site/publish/language/datatypes.html phoenix/site/publish/language/functions.html phoenix/site/publish/language/index.html phoenix/site/publish/mailing_list.html phoenix/site/publish/multi-tenancy.html phoenix/site/publish/paged.html phoenix/site/publish/performance.html phoenix/site/publish/phoenix_on_emr.html phoenix/site/publish/pig_integration.html phoenix/site/publish/recent.html phoenix/site/publish/resources.html phoenix/site/publish/roadmap.html phoenix/site/publish/salted.html phoenix/site/publish/secondary_indexing.html phoenix/site/publish/sequences.html phoenix/site/publish/skip_scan.html phoenix/site/publish/source.html phoenix/site/publish/team.html phoenix/site/publish/tuning.html 
phoenix/site/publish/upgrade_from_2_2.html phoenix/site/publish/views.html phoenix/site/source/src/site/site.xml Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/Driver.java?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/Driver.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/Driver.java Thu Aug 14 22:21:43 2014 @@ -17,7 +17,7 @@ import org.h2.message.DbException; import org.h2.message.TraceSystem; import org.h2.upgrade.DbUpgrade; -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.logging.Logger; //*/ @@ -138,7 +138,7 @@ public class Driver implements java.sql. /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public Logger getParentLogger() { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java Thu Aug 14 22:21:43 2014 @@ -1440,7 +1440,7 @@ public class JdbcCallableStatement exten * @param parameterIndex the parameter index (1, 2, ...) 
* @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(int parameterIndex, Class type) { return null; } @@ -1452,7 +1452,7 @@ public class JdbcCallableStatement exten * @param parameterName the parameter name * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(String parameterName, Class type) { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java?rev=1618075&r1=1618074&r2=1618075&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java Thu Aug 14 22:21:43 2014 @@ -49,7 +49,7 @@ import java.sql.SQLXML; import java.sql.SQLClientInfoException; //*/ -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.concurrent.Executor; //*/ @@ -1678,7 +1678,7 @@ public class JdbcConnection extends Trac * * @param schema the schema
phoenix git commit: PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi)
Repository: phoenix Updated Branches: refs/heads/master c1e5c71ab -> a1032fba3 PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a1032fba Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a1032fba Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a1032fba Branch: refs/heads/master Commit: a1032fba34164b9ac9c62d2187302cdc0e8b2846 Parents: c1e5c71 Author: Jesse Yates Authored: Wed May 13 10:00:52 2015 -0700 Committer: Jesse Yates Committed: Thu May 14 12:52:57 2015 -0700 -- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/a1032fba/pom.xml -- diff --git a/pom.xml b/pom.xml index 23ac578..eec1f2a 100644 --- a/pom.xml +++ b/pom.xml @@ -85,7 +85,7 @@ 1.2 2.5.1 -0.12.0 +0.13.0 1.8.8 3.5 1.2.17
phoenix git commit: PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.0 555db6456 -> 9d92e6811 PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9d92e681 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9d92e681 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9d92e681 Branch: refs/heads/4.x-HBase-1.0 Commit: 9d92e68119816847d507a8ff984461f86bcce07f Parents: 555db64 Author: Jesse Yates Authored: Wed May 13 10:00:52 2015 -0700 Committer: Jesse Yates Committed: Thu May 14 13:17:29 2015 -0700 -- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/9d92e681/pom.xml -- diff --git a/pom.xml b/pom.xml index 40d4775..b2e6a1c 100644 --- a/pom.xml +++ b/pom.xml @@ -85,7 +85,7 @@ 1.2 2.5.1 -0.12.0 +0.13.0 1.8.8 3.5 1.2.17
phoenix git commit: PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 56afcb35c -> 9c15d3ee6 PHOENIX-1965 Upgrade Pig to version 0.13 (Prashant Kommireddi) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9c15d3ee Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9c15d3ee Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9c15d3ee Branch: refs/heads/4.x-HBase-0.98 Commit: 9c15d3ee6a2630dcfa7723424b0c965531b4afd3 Parents: 56afcb3 Author: Jesse Yates Authored: Wed May 13 10:00:52 2015 -0700 Committer: Jesse Yates Committed: Thu May 14 13:17:12 2015 -0700 -- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/9c15d3ee/pom.xml -- diff --git a/pom.xml b/pom.xml index 2012f52..331ecdf 100644 --- a/pom.xml +++ b/pom.xml @@ -85,7 +85,7 @@ 1.2 1.0.4 -0.12.0 +0.13.0 1.8.8 3.5 1.2.17
git commit: PHOENIX-1187 Enable tracing on server
Repository: phoenix Updated Branches: refs/heads/master 4fcd01d6a -> f99e5d8d6 PHOENIX-1187 Enable tracing on server Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f99e5d8d Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f99e5d8d Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f99e5d8d Branch: refs/heads/master Commit: f99e5d8d609d326fb3571255cd8f47961b1c6860 Parents: 4fcd01d Author: Jesse Yates Authored: Tue Aug 19 15:25:57 2014 -0700 Committer: Jesse Yates Committed: Wed Aug 20 13:43:38 2014 -0700 -- phoenix-assembly/pom.xml| 2 +- phoenix-assembly/src/build/all.xml | 184 -- .../components/all-common-dependencies.xml | 62 ++ .../src/build/components/all-common-files.xml | 71 +++ .../src/build/components/all-common-jars.xml| 74 +++ phoenix-assembly/src/build/hadoop-one-all.xml | 53 + phoenix-assembly/src/build/hadoop-two-all.xml | 64 +++ .../coprocessor/MetaDataEndpointImpl.java | 11 ++ .../apache/phoenix/jdbc/PhoenixConnection.java | 9 +- .../trace/PhoenixTableMetricsWriter.java| 45 +++-- .../org/apache/phoenix/trace/util/Tracing.java | 22 +++ .../java/org/apache/phoenix/util/QueryUtil.java | 33 +++- .../org/apache/phoenix/metrics/Metrics.java | 41 .../org/apache/phoenix/trace/TracingCompat.java | 3 + .../bin/hadoop-metrics2-hbase.properties| 20 ++ .../bin/hadoop-metrics2-phoenix.properties | 53 + .../phoenix/trace/PhoenixMetricsSink.java | 191 +++ .../phoenix/trace/PhoenixMetricsWriter.java | 176 - .../apache/phoenix/trace/TraceMetricSource.java | 13 +- .../phoenix/trace/PhoenixMetricsWriterTest.java | 2 +- pom.xml | 6 +- 21 files changed, 740 insertions(+), 395 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/f99e5d8d/phoenix-assembly/pom.xml -- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index d8bd2d2..4921731 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -72,7 +72,7 @@ gnu false -src/build/all.xml 
+src/build/${assembly.descriptor} http://git-wip-us.apache.org/repos/asf/phoenix/blob/f99e5d8d/phoenix-assembly/src/build/all.xml -- diff --git a/phoenix-assembly/src/build/all.xml b/phoenix-assembly/src/build/all.xml deleted file mode 100644 index 8fa5884..000 --- a/phoenix-assembly/src/build/all.xml +++ /dev/null @@ -1,184 +0,0 @@ - - - -http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"; - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; - xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd";> - - all - -tar.gz - - true - - - - - - ${project.basedir}/.. - / - -*.txt - - - - ${project.basedir}/../bin - bin - 0755 - 0755 - -*.py -*.sh - - - - ${project.basedir}/../bin - bin - 0644 - 0755 - -*.py/ -*.sh/ - - - - ${project.basedir}/../dev - 0644 - 0755 - - - ${project.basedir}/../docs - 0644 - 0755 - - - ${project.basedir}/../examples - 0644 - 0755 - - - - target - / - -phoenix-*-client.jar -phoenix-*-mapreduce.jar - - - - - ${project.basedir}/../phoenix-hadoop-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-hadoop1-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-hadoop2-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-pig/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-flume/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-core/target/ - lib - -phoenix-*.jar - - - - - 0644 - - - - - - - - false - /lib - -commons-configuration:commons-configuration -commons-io:commons-io -commons-lang:commons-lang -
git commit: PHOENIX-1187 Enable tracing on server
Repository: phoenix Updated Branches: refs/heads/4.0 d1a8f49a8 -> 572724ef8 PHOENIX-1187 Enable tracing on server Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/572724ef Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/572724ef Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/572724ef Branch: refs/heads/4.0 Commit: 572724ef8bcab8e48eefec910a8654114e2bfebb Parents: d1a8f49 Author: Jesse Yates Authored: Tue Aug 19 15:25:57 2014 -0700 Committer: Jesse Yates Committed: Wed Aug 20 13:42:43 2014 -0700 -- phoenix-assembly/pom.xml| 2 +- phoenix-assembly/src/build/all.xml | 184 -- .../components/all-common-dependencies.xml | 62 ++ .../src/build/components/all-common-files.xml | 71 +++ .../src/build/components/all-common-jars.xml| 74 +++ phoenix-assembly/src/build/hadoop-one-all.xml | 53 + phoenix-assembly/src/build/hadoop-two-all.xml | 64 +++ .../coprocessor/MetaDataEndpointImpl.java | 11 ++ .../apache/phoenix/jdbc/PhoenixConnection.java | 9 +- .../trace/PhoenixTableMetricsWriter.java| 45 +++-- .../org/apache/phoenix/trace/util/Tracing.java | 22 +++ .../java/org/apache/phoenix/util/QueryUtil.java | 33 +++- .../org/apache/phoenix/metrics/Metrics.java | 41 .../org/apache/phoenix/trace/TracingCompat.java | 3 + .../bin/hadoop-metrics2-hbase.properties| 20 ++ .../bin/hadoop-metrics2-phoenix.properties | 53 + .../phoenix/trace/PhoenixMetricsSink.java | 191 +++ .../phoenix/trace/PhoenixMetricsWriter.java | 176 - .../apache/phoenix/trace/TraceMetricSource.java | 13 +- .../phoenix/trace/PhoenixMetricsWriterTest.java | 2 +- pom.xml | 6 +- 21 files changed, 740 insertions(+), 395 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/pom.xml -- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index b3c24d2..a34b09c 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -72,7 +72,7 @@ gnu false -src/build/all.xml 
+src/build/${assembly.descriptor} http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/all.xml -- diff --git a/phoenix-assembly/src/build/all.xml b/phoenix-assembly/src/build/all.xml deleted file mode 100644 index 8fa5884..000 --- a/phoenix-assembly/src/build/all.xml +++ /dev/null @@ -1,184 +0,0 @@ - - - -http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"; - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; - xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd";> - - all - -tar.gz - - true - - - - - - ${project.basedir}/.. - / - -*.txt - - - - ${project.basedir}/../bin - bin - 0755 - 0755 - -*.py -*.sh - - - - ${project.basedir}/../bin - bin - 0644 - 0755 - -*.py/ -*.sh/ - - - - ${project.basedir}/../dev - 0644 - 0755 - - - ${project.basedir}/../docs - 0644 - 0755 - - - ${project.basedir}/../examples - 0644 - 0755 - - - - target - / - -phoenix-*-client.jar -phoenix-*-mapreduce.jar - - - - - ${project.basedir}/../phoenix-hadoop-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-hadoop1-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-hadoop2-compat/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-pig/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-flume/target/ - lib - -phoenix-*.jar - - 0644 - - - ${project.basedir}/../phoenix-core/target/ - lib - -phoenix-*.jar - - - - - 0644 - - - - - - - - false - /lib - -commons-configuration:commons-configuration -commons-io:commons-io -commons-lang:commons-lang -com
git commit: PHOENIX-1187 Addendum: fix hadoop2-compat META-INF
Repository: phoenix Updated Branches: refs/heads/master 29a7be42b -> 3acf9c8ef PHOENIX-1187 Addendum: fix hadoop2-compat META-INF Necessary because PhoenixMetricsWriter changed name to PhoenixMetricsSink Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3acf9c8e Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3acf9c8e Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3acf9c8e Branch: refs/heads/master Commit: 3acf9c8efa5dd8b72629274ce7805d79fd467400 Parents: 29a7be4 Author: Jesse Yates Authored: Thu Aug 21 09:47:42 2014 -0700 Committer: Jesse Yates Committed: Thu Aug 21 09:55:19 2014 -0700 -- .../services/org.apache.phoenix.trace.TestableMetricsWriter| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/3acf9c8e/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter -- diff --git a/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter b/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter index b10af39..7c9e107 100644 --- a/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter +++ b/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter @@ -1 +1 @@ -org.apache.phoenix.trace.PhoenixMetricsWriter \ No newline at end of file +org.apache.phoenix.trace.PhoenixMetricsSink \ No newline at end of file
git commit: PHOENIX-1187 Addendum: fix hadoop2-compat META-INF
Repository: phoenix Updated Branches: refs/heads/4.0 1987fba12 -> 9af70445c PHOENIX-1187 Addendum: fix hadoop2-compat META-INF Necessary because PhoenixMetricsWriter changed name to PhoenixMetricsSink Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9af70445 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9af70445 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9af70445 Branch: refs/heads/4.0 Commit: 9af70445c265f9b12d2f28eba694df3425598e94 Parents: 1987fba Author: Jesse Yates Authored: Thu Aug 21 09:47:42 2014 -0700 Committer: Jesse Yates Committed: Thu Aug 21 09:49:22 2014 -0700 -- .../services/org.apache.phoenix.trace.TestableMetricsWriter| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/9af70445/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter -- diff --git a/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter b/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter index b10af39..7c9e107 100644 --- a/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter +++ b/phoenix-hadoop2-compat/src/main/resources/META-INF/services/org.apache.phoenix.trace.TestableMetricsWriter @@ -1 +1 @@ -org.apache.phoenix.trace.PhoenixMetricsWriter \ No newline at end of file +org.apache.phoenix.trace.PhoenixMetricsSink \ No newline at end of file
svn commit: r1619499 - in /phoenix: phoenix-docs/src/main/org/h2/ phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/ phoenix-docs/src/main/org/h2/tools/ site/publish/ site/source/s
Author: jyates Date: Thu Aug 21 17:48:54 2014 New Revision: 1619499 URL: http://svn.apache.org/r1619499 Log: Updating tracing docs after PHOENIX-1187 Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java phoenix/site/publish/tracing.html phoenix/site/source/src/site/markdown/tracing.md Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/Driver.java?rev=1619499&r1=1619498&r2=1619499&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/Driver.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/Driver.java Thu Aug 21 17:48:54 2014 @@ -17,7 +17,7 @@ import org.h2.message.DbException; import org.h2.message.TraceSystem; import org.h2.upgrade.DbUpgrade; -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.logging.Logger; //*/ @@ -138,7 +138,7 @@ public class Driver implements java.sql. 
/** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public Logger getParentLogger() { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java?rev=1619499&r1=1619498&r2=1619499&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java Thu Aug 21 17:48:54 2014 @@ -1440,7 +1440,7 @@ public class JdbcCallableStatement exten * @param parameterIndex the parameter index (1, 2, ...) * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(int parameterIndex, Class type) { return null; } @@ -1452,7 +1452,7 @@ public class JdbcCallableStatement exten * @param parameterName the parameter name * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(String parameterName, Class type) { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java?rev=1619499&r1=1619498&r2=1619499&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java Thu Aug 21 17:48:54 2014 @@ -49,7 +49,7 @@ import java.sql.SQLXML; import java.sql.SQLClientInfoException; //*/ -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.concurrent.Executor; //*/ @@ -1678,7 +1678,7 @@ public class JdbcConnection extends Trac * * @param schema the schema */ -/*## Java 1.7 ## +//## Java 1.7 ## public void setSchema(String schema) { // not supported } @@ -1687,7 +1687,7 @@ public class JdbcConnection extends Trac /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public String getSchema() { return null; } @@ -1698,7 +1698,7 @@ public class 
JdbcConnection extends Trac * * @param executor the executor used by this method */ -/*## Java 1.7 ## +//## Java 1.7 ## public void abort(Executor executor) { // not supported } @@ -1710,7 +1710,7 @@ public class JdbcConnection extends Trac * @param executor the executor used by this method * @param milliseconds the TCP connection timeout */ -/*## Java 1.7 ## +//## Java 1.7 ## public void setNetworkTimeout(Executor executor, int milliseconds) { // not supported } @@ -1719,7 +1719,7 @@ public class JdbcConnection extends Trac /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public int getNetworkTimeout() { return 0; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java?rev=1619499&r1=1619498&r2=1619499&view=diff == --- phoenix/phoenix-doc
svn commit: r1619954 - in /phoenix: ./ phoenix-docs/src/main/org/h2/ phoenix-docs/src/main/org/h2/jdbc/ phoenix-docs/src/main/org/h2/jdbcx/ phoenix-docs/src/main/org/h2/tools/ site/publish/ site/sourc
Author: jyates Date: Sat Aug 23 00:48:58 2014 New Revision: 1619954 URL: http://svn.apache.org/r1619954 Log: Updating tracing docs to include actual config values, updating build.sh to remove temp file Modified: phoenix/build.sh phoenix/phoenix-docs/src/main/org/h2/Driver.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcDatabaseMetaData.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcResultSet.java phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcStatement.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcConnectionPool.java phoenix/phoenix-docs/src/main/org/h2/jdbcx/JdbcDataSource.java phoenix/phoenix-docs/src/main/org/h2/tools/SimpleResultSet.java phoenix/site/publish/tracing.html phoenix/site/source/src/site/markdown/tracing.md Modified: phoenix/build.sh URL: http://svn.apache.org/viewvc/phoenix/build.sh?rev=1619954&r1=1619953&r2=1619954&view=diff == --- phoenix/build.sh (original) +++ phoenix/build.sh Sat Aug 23 00:48:58 2014 @@ -12,3 +12,11 @@ echo "BUILDING SITE" echo "===" cd ../site/source/ mvn clean site + +echo "" +echo "Removing temp directories" +echo "===" +cd ../../ +rm -rf phoenix-docs/temp +rm -rf phoenix-docs/bin +rm -rf phoenix-docs/docs Modified: phoenix/phoenix-docs/src/main/org/h2/Driver.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/Driver.java?rev=1619954&r1=1619953&r2=1619954&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/Driver.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/Driver.java Sat Aug 23 00:48:58 2014 @@ -17,7 +17,7 @@ import org.h2.message.DbException; import org.h2.message.TraceSystem; import org.h2.upgrade.DbUpgrade; -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.logging.Logger; //*/ @@ -138,7 +138,7 @@ public class Driver implements java.sql. 
/** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public Logger getParentLogger() { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java?rev=1619954&r1=1619953&r2=1619954&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcCallableStatement.java Sat Aug 23 00:48:58 2014 @@ -1440,7 +1440,7 @@ public class JdbcCallableStatement exten * @param parameterIndex the parameter index (1, 2, ...) * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(int parameterIndex, Class type) { return null; } @@ -1452,7 +1452,7 @@ public class JdbcCallableStatement exten * @param parameterName the parameter name * @param type the class of the returned value */ -/*## Java 1.7 ## +//## Java 1.7 ## public T getObject(String parameterName, Class type) { return null; } Modified: phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java URL: http://svn.apache.org/viewvc/phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java?rev=1619954&r1=1619953&r2=1619954&view=diff == --- phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java (original) +++ phoenix/phoenix-docs/src/main/org/h2/jdbc/JdbcConnection.java Sat Aug 23 00:48:58 2014 @@ -49,7 +49,7 @@ import java.sql.SQLXML; import java.sql.SQLClientInfoException; //*/ -/*## Java 1.7 ## +//## Java 1.7 ## import java.util.concurrent.Executor; //*/ @@ -1678,7 +1678,7 @@ public class JdbcConnection extends Trac * * @param schema the schema */ -/*## Java 1.7 ## +//## Java 1.7 ## public void setSchema(String schema) { // not supported } @@ -1687,7 +1687,7 @@ public class JdbcConnection extends Trac /** * [Not supported] */ -/*## Java 1.7 ## +//## Java 1.7 ## public String getSchema() { return null; } @@ -1698,7 +1698,7 @@ public class 
JdbcConnection extends Trac * * @param executor the executor used by this method */ -/*## Java 1.7 ## +//## Java 1.7 ## public void abort(Executor executor) { // not supported } @@ -1710,7 +1710,7 @@ public class JdbcConnection extends Trac * @param executor the executor used by this method * @param milliseconds the TCP connection timeout
git commit: PHOENIX-1234 QueryUtil doesn't parse zk hosts correctly
Repository: phoenix Updated Branches: refs/heads/master ec3be54ef -> 4a1ec7ec4 PHOENIX-1234 QueryUtil doesn't parse zk hosts correctly Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4a1ec7ec Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4a1ec7ec Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4a1ec7ec Branch: refs/heads/master Commit: 4a1ec7ec44248315023db41cf4c941a366a1d294 Parents: ec3be54 Author: Jesse Yates Authored: Thu Sep 4 09:44:10 2014 -0700 Committer: Jesse Yates Committed: Thu Sep 4 09:44:10 2014 -0700 -- .../java/org/apache/phoenix/util/QueryUtil.java | 55 +++- .../java/org/apache/phoenix/query/BaseTest.java | 16 +++--- .../org/apache/phoenix/util/QueryUtilTest.java | 55 +++- 3 files changed, 102 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/4a1ec7ec/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java index 6a45666..88ffd8e 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java @@ -25,6 +25,7 @@ import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import java.util.Properties; @@ -35,6 +36,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.Addressing; +import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.phoenix.jdbc.PhoenixDriver; @@ -43,6 +45,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import 
com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import org.apache.phoenix.query.QueryServices; public final class QueryUtil { @@ -189,26 +192,50 @@ public final class QueryUtil { } public static Connection getConnection(Properties props, Configuration conf) +throws ClassNotFoundException, +SQLException { +String url = getConnectionUrl(props, conf); +LOG.info("Creating connection with the jdbc url:" + url); +return DriverManager.getConnection(url, props); +} + +public static String getConnectionUrl(Properties props, Configuration conf) throws ClassNotFoundException, SQLException { // make sure we load the phoenix driver Class.forName(PhoenixDriver.class.getName()); // read the hbase properties from the configuration String server = ZKConfig.getZKQuorumServersString(conf); -int port; -// if it has a port, don't try to add one -try { -server = Addressing.parseHostname(server); -port = Addressing.parsePort(server); -} catch (IllegalArgumentException e) { -// port isn't set -port = -conf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT, -HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT); +// could be a comma-separated list +String[] rawServers = server.split(","); +List servers = new ArrayList(rawServers.length); +boolean first = true; +int port = -1; +for (String serverPort : rawServers) { +try { +server = Addressing.parseHostname(serverPort); +int specifiedPort = Addressing.parsePort(serverPort); +// there was a previously specified port and it doesn't match this server +if (port > 0 && specifiedPort != port) { +throw new IllegalStateException("Phoenix/HBase only supports connecting to a " + +"single zookeeper client port. 
Specify servers only as host names in " + +"HBase configuration"); +} +// set the port to the specified port +port = specifiedPort; +servers.add(server); +} catch (IllegalArgumentException e) { +} +} +// port wasn't set, shouldn't ever happen from HBase, but just in case +if (port == -1) { +port = conf.getInt(QueryServices.ZOOKEEPER_PORT_ATTRIB, -1); +if (port == -1) { +throw new RuntimeException("Client zk port was not set!"); +} } +server = Joiner.on(',').joi
git commit: PHOENIX-1234 QueryUtil doesn't parse zk hosts correctly
Repository: phoenix Updated Branches: refs/heads/4.0 c4ede66fe -> a9a128bfa PHOENIX-1234 QueryUtil doesn't parse zk hosts correctly Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a9a128bf Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a9a128bf Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a9a128bf Branch: refs/heads/4.0 Commit: a9a128bfa82f0f978f2260a119a4e88ff1db6330 Parents: c4ede66 Author: Jesse Yates Authored: Thu Sep 4 09:44:10 2014 -0700 Committer: Jesse Yates Committed: Thu Sep 4 09:44:46 2014 -0700 -- .../java/org/apache/phoenix/util/QueryUtil.java | 55 +++- .../java/org/apache/phoenix/query/BaseTest.java | 16 +++--- .../org/apache/phoenix/util/QueryUtilTest.java | 55 +++- 3 files changed, 102 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/a9a128bf/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java index da6b17a..7f5d4c6 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java @@ -25,6 +25,7 @@ import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; import java.util.List; import java.util.Properties; @@ -35,6 +36,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.Addressing; +import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.phoenix.jdbc.PhoenixDriver; @@ -43,6 +45,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import 
com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import org.apache.phoenix.query.QueryServices; public final class QueryUtil { @@ -190,26 +193,50 @@ public final class QueryUtil { } public static Connection getConnection(Properties props, Configuration conf) +throws ClassNotFoundException, +SQLException { +String url = getConnectionUrl(props, conf); +LOG.info("Creating connection with the jdbc url:" + url); +return DriverManager.getConnection(url, props); +} + +public static String getConnectionUrl(Properties props, Configuration conf) throws ClassNotFoundException, SQLException { // make sure we load the phoenix driver Class.forName(PhoenixDriver.class.getName()); // read the hbase properties from the configuration String server = ZKConfig.getZKQuorumServersString(conf); -int port; -// if it has a port, don't try to add one -try { -server = Addressing.parseHostname(server); -port = Addressing.parsePort(server); -} catch (IllegalArgumentException e) { -// port isn't set -port = -conf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT, -HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT); +// could be a comma-separated list +String[] rawServers = server.split(","); +List servers = new ArrayList(rawServers.length); +boolean first = true; +int port = -1; +for (String serverPort : rawServers) { +try { +server = Addressing.parseHostname(serverPort); +int specifiedPort = Addressing.parsePort(serverPort); +// there was a previously specified port and it doesn't match this server +if (port > 0 && specifiedPort != port) { +throw new IllegalStateException("Phoenix/HBase only supports connecting to a " + +"single zookeeper client port. 
Specify servers only as host names in " + +"HBase configuration"); +} +// set the port to the specified port +port = specifiedPort; +servers.add(server); +} catch (IllegalArgumentException e) { +} +} +// port wasn't set, shouldn't ever happen from HBase, but just in case +if (port == -1) { +port = conf.getInt(QueryServices.ZOOKEEPER_PORT_ATTRIB, -1); +if (port == -1) { +throw new RuntimeException("Client zk port was not set!"); +} } +server = Joiner.on(',').join(serv
git commit: PHOENIX-1226 Exception in Tracing
Repository: phoenix Updated Branches: refs/heads/4.0 a9a128bfa -> 4c8441b5f PHOENIX-1226 Exception in Tracing Phoenix tracing expects annotation values to be integers, but HBase writes non-integer values in its traces (which end up in the phoenix sink) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/4c8441b5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/4c8441b5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/4c8441b5 Branch: refs/heads/4.0 Commit: 4c8441b5f3813e4ed5ff6d0522e946bf1892bd29 Parents: a9a128b Author: Jesse Yates Authored: Fri Sep 5 16:44:15 2014 -0700 Committer: Jesse Yates Committed: Fri Sep 5 16:44:43 2014 -0700 -- .../org/apache/phoenix/trace/TracingCompat.java | 4 +- .../phoenix/trace/TraceMetricsSourceTest.java | 96 2 files changed, 98 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/4c8441b5/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java index 783bfd6..032e38a 100644 --- a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java @@ -49,11 +49,11 @@ public class TracingCompat { public static final String METRICS_MARKER_CONTEXT = "marker"; public static void addAnnotation(Span span, String message, int value) { -span.addKVAnnotation(message.getBytes(), Bytes.toBytes(value)); +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(Integer.toString(value))); } public static Pair readAnnotation(byte[] key, byte[] value) { -return new Pair(new String(key), Integer.toString(Bytes.toInt(value))); +return new Pair(new String(key), Bytes.toString(value)); } public static MetricsWriter initializeWriter(String 
clazz) { http://git-wip-us.apache.org/repos/asf/phoenix/blob/4c8441b5/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java new file mode 100644 index 000..3258e8a --- /dev/null +++ b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java @@ -0,0 +1,96 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.trace; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.metrics2.MetricsCollector; +import org.apache.hadoop.metrics2.MetricsRecordBuilder; +import org.apache.hadoop.metrics2.MetricsTag; +import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.impl.MilliSpan; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Test that the @{link TraceMetricSource} correctly handles different kinds of traces + */ +public class TraceMetricsSourceTest { + + @BeforeClass + public static void setup() throws Exception{ +DefaultMetricsSystem.setMiniClusterMode(true); + } + + /** + * For PHOENIX-1126, Phoenix originally assumed all the annotation values were integers, + * but HBase writes some strings as well, so we need to be able to handle that too + */ + @Test + public void testNonIntegerAnnotations(){ +Span span = getSpan(); +// make sure its less than the length of an integer +byte[] value = Bytes.toBytes("a"); +byte[] someInt = Bytes.toBytes(1); +assertTrue(someInt.length
git commit: PHOENIX-1226 Exception in Tracing
Repository: phoenix Updated Branches: refs/heads/master 4a1ec7ec4 -> 284435b5a PHOENIX-1226 Exception in Tracing Phoenix tracing expects annotation values to be integers, but HBase writes non-integer values in its traces (which end up in the phoenix sink) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/284435b5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/284435b5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/284435b5 Branch: refs/heads/master Commit: 284435b5ab4e749febf13ad00f9050f6f25f9e15 Parents: 4a1ec7e Author: Jesse Yates Authored: Fri Sep 5 16:44:15 2014 -0700 Committer: Jesse Yates Committed: Fri Sep 5 16:44:36 2014 -0700 -- .../org/apache/phoenix/trace/TracingCompat.java | 4 +- .../phoenix/trace/TraceMetricsSourceTest.java | 96 2 files changed, 98 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/284435b5/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java -- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java index 783bfd6..032e38a 100644 --- a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java @@ -49,11 +49,11 @@ public class TracingCompat { public static final String METRICS_MARKER_CONTEXT = "marker"; public static void addAnnotation(Span span, String message, int value) { -span.addKVAnnotation(message.getBytes(), Bytes.toBytes(value)); +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(Integer.toString(value))); } public static Pair readAnnotation(byte[] key, byte[] value) { -return new Pair(new String(key), Integer.toString(Bytes.toInt(value))); +return new Pair(new String(key), Bytes.toString(value)); } public static MetricsWriter 
initializeWriter(String clazz) { http://git-wip-us.apache.org/repos/asf/phoenix/blob/284435b5/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java new file mode 100644 index 000..3258e8a --- /dev/null +++ b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java @@ -0,0 +1,96 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.trace; + +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.metrics2.MetricsCollector; +import org.apache.hadoop.metrics2.MetricsRecordBuilder; +import org.apache.hadoop.metrics2.MetricsTag; +import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.cloudera.htrace.Span; +import org.cloudera.htrace.impl.MilliSpan; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +/** + * Test that the @{link TraceMetricSource} correctly handles different kinds of traces + */ +public class TraceMetricsSourceTest { + + @BeforeClass + public static void setup() throws Exception{ +DefaultMetricsSystem.setMiniClusterMode(true); + } + + /** + * For PHOENIX-1126, Phoenix originally assumed all the annotation values were integers, + * but HBase writes some strings as well, so we need to be able to handle that too + */ + @Test + public void testNonIntegerAnnotations(){ +Span span = getSpan(); +// make sure its less than the length of an integer +byte[] value = Bytes.toBytes("a"); +byte[] someInt = Bytes.toBytes(1); +assertTrue(someInt.
git commit: PHOENIX-103 Drop hadoop1.0 specifics from code
Repository: phoenix Updated Branches: refs/heads/4.0 15939f295 -> a329d3f42 PHOENIX-103 Drop hadoop1.0 specifics from code Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/a329d3f4 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/a329d3f4 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/a329d3f4 Branch: refs/heads/4.0 Commit: a329d3f4222d0dd2ce41f4aecb740af1e75b6d2d Parents: 15939f2 Author: Jesse Yates Authored: Mon Sep 22 14:15:56 2014 -0700 Committer: Jesse Yates Committed: Tue Sep 23 14:23:50 2014 -0700 -- bin/hadoop-metrics2-hbase.properties| 20 ++ bin/hadoop-metrics2-phoenix.properties | 53 +++ phoenix-assembly/pom.xml| 4 +- phoenix-assembly/src/build/hadoop-one-all.xml | 53 --- phoenix-assembly/src/build/hadoop-two-all.xml | 64 .../src/build/package-to-tar-all.xml| 38 +++ phoenix-core/pom.xml| 280 +--- phoenix-flume/pom.xml | 276 +--- phoenix-hadoop1-compat/pom.xml | 40 --- .../bin/hadoop-metrics2-hbase.properties| 20 -- .../bin/hadoop-metrics2-phoenix.properties | 53 --- phoenix-pig/pom.xml | 292 +--- pom.xml | 329 ++- 13 files changed, 475 insertions(+), 1047 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/a329d3f4/bin/hadoop-metrics2-hbase.properties -- diff --git a/bin/hadoop-metrics2-hbase.properties b/bin/hadoop-metrics2-hbase.properties new file mode 100644 index 000..499ab6b --- /dev/null +++ b/bin/hadoop-metrics2-hbase.properties @@ -0,0 +1,20 @@ +# HBase Server Sink Configuration +# +# +# Configuration for the metrics2 system for the HBase RegionServers +# to enable phoenix trace collection on the HBase servers. +# +# See hadoop-metrics2-phoenix.properties for how these configurations +# are utilized. 
+# +# Either this file can be used in place of the standard +# hadoop-metrics2-hbase.properties file or the below +# properties should be added to the file of the same name on +# the HBase classpath (likely in the HBase conf/ folder) + +# ensure that we receive traces on the server +hbase.sink.tracing.class=org.apache.phoenix.trace.PhoenixMetricsSink +# Tell the sink where to write the metrics +hbase.sink.tracing.writer-class=org.apache.phoenix.trace.PhoenixTableMetricsWriter +# Only handle traces with a context of "tracing" +hbase.sink.tracing.context=tracing http://git-wip-us.apache.org/repos/asf/phoenix/blob/a329d3f4/bin/hadoop-metrics2-phoenix.properties -- diff --git a/bin/hadoop-metrics2-phoenix.properties b/bin/hadoop-metrics2-phoenix.properties new file mode 100644 index 000..4a5794d --- /dev/null +++ b/bin/hadoop-metrics2-phoenix.properties @@ -0,0 +1,53 @@ +# Metrics properties for phoenix + +# +#There are two options with file names: +# 1. hadoop-metrics2-[prefix].properties +# 2. hadoop-metrics2.properties +# Either will be loaded by the metrics system (but not both). +# +# NOTE: The metrics system is only initialized once per JVM (but does ref-counting, so we can't +#shutdown and restart), so we only load the first prefix that we find. Generally, this will be +#phoenix (unless someone else registers first, but for many clients, there should only be one). +# +# Usually, you would use hadoop-metrics2-phoenix.properties, but we use the generic +# hadoop-metrics2.properties to ensure it these are loaded regardless of where we are running, +# assuming there isn't another config on the classpath. + +# When specifying sinks, the syntax to use is: +#[prefix].[source|sink].[instance].[options] +# The interesting thing to note is that [instance] can literally be anything (as long as its +# not zero-length). It is only there to differentiate the properties that are stored for +# objects of the same type (e.g. differentiating between two phoenix.sink objects). 
+# +#You could the following lines in your config +# +# phoenix.sink.thingA.class=com.your-company.SpecialSink +# phoenix.sink.thingA.option1=value1 +# +# and also +# +# phoenix.sink.thingB.class=org.apache.phoenix.trace.PhoenixMetricsSink +# phoenix.sink.thingB.doGoodStuff=true +# +# which will create both SpecialSink and PhoenixMetricsSink and register them +# as a MetricsSink, but Special sink will only see option1=value1 in its +# configuration, which similarly, the instantiated PhoenixMetricsSink will +# only see doGoodStuff=true in its configuration +# +# See jav
git commit: PHOENIX-103 Drop hadoop1.0 specifics from code
Repository: phoenix Updated Branches: refs/heads/master 980783288 -> c72c4a65d PHOENIX-103 Drop hadoop1.0 specifics from code Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c72c4a65 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c72c4a65 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c72c4a65 Branch: refs/heads/master Commit: c72c4a65dc7b8406301fc6f9909f81195bc134a0 Parents: 9807832 Author: Jesse Yates Authored: Mon Sep 22 14:15:56 2014 -0700 Committer: Jesse Yates Committed: Tue Sep 23 14:22:34 2014 -0700 -- bin/hadoop-metrics2-hbase.properties| 20 ++ bin/hadoop-metrics2-phoenix.properties | 53 +++ phoenix-assembly/pom.xml| 4 +- phoenix-assembly/src/build/hadoop-one-all.xml | 53 --- phoenix-assembly/src/build/hadoop-two-all.xml | 64 .../src/build/package-to-tar-all.xml| 38 +++ phoenix-core/pom.xml| 280 +--- phoenix-flume/pom.xml | 276 +--- phoenix-hadoop1-compat/pom.xml | 40 --- .../bin/hadoop-metrics2-hbase.properties| 20 -- .../bin/hadoop-metrics2-phoenix.properties | 53 --- phoenix-pig/pom.xml | 292 +--- pom.xml | 329 ++- 13 files changed, 475 insertions(+), 1047 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/c72c4a65/bin/hadoop-metrics2-hbase.properties -- diff --git a/bin/hadoop-metrics2-hbase.properties b/bin/hadoop-metrics2-hbase.properties new file mode 100644 index 000..499ab6b --- /dev/null +++ b/bin/hadoop-metrics2-hbase.properties @@ -0,0 +1,20 @@ +# HBase Server Sink Configuration +# +# +# Configuration for the metrics2 system for the HBase RegionServers +# to enable phoenix trace collection on the HBase servers. +# +# See hadoop-metrics2-phoenix.properties for how these configurations +# are utilized. 
+# +# Either this file can be used in place of the standard +# hadoop-metrics2-hbase.properties file or the below +# properties should be added to the file of the same name on +# the HBase classpath (likely in the HBase conf/ folder) + +# ensure that we receive traces on the server +hbase.sink.tracing.class=org.apache.phoenix.trace.PhoenixMetricsSink +# Tell the sink where to write the metrics +hbase.sink.tracing.writer-class=org.apache.phoenix.trace.PhoenixTableMetricsWriter +# Only handle traces with a context of "tracing" +hbase.sink.tracing.context=tracing http://git-wip-us.apache.org/repos/asf/phoenix/blob/c72c4a65/bin/hadoop-metrics2-phoenix.properties -- diff --git a/bin/hadoop-metrics2-phoenix.properties b/bin/hadoop-metrics2-phoenix.properties new file mode 100644 index 000..4a5794d --- /dev/null +++ b/bin/hadoop-metrics2-phoenix.properties @@ -0,0 +1,53 @@ +# Metrics properties for phoenix + +# +#There are two options with file names: +# 1. hadoop-metrics2-[prefix].properties +# 2. hadoop-metrics2.properties +# Either will be loaded by the metrics system (but not both). +# +# NOTE: The metrics system is only initialized once per JVM (but does ref-counting, so we can't +#shutdown and restart), so we only load the first prefix that we find. Generally, this will be +#phoenix (unless someone else registers first, but for many clients, there should only be one). +# +# Usually, you would use hadoop-metrics2-phoenix.properties, but we use the generic +# hadoop-metrics2.properties to ensure it these are loaded regardless of where we are running, +# assuming there isn't another config on the classpath. + +# When specifying sinks, the syntax to use is: +#[prefix].[source|sink].[instance].[options] +# The interesting thing to note is that [instance] can literally be anything (as long as its +# not zero-length). It is only there to differentiate the properties that are stored for +# objects of the same type (e.g. differentiating between two phoenix.sink objects). 
+# +#You could the following lines in your config +# +# phoenix.sink.thingA.class=com.your-company.SpecialSink +# phoenix.sink.thingA.option1=value1 +# +# and also +# +# phoenix.sink.thingB.class=org.apache.phoenix.trace.PhoenixMetricsSink +# phoenix.sink.thingB.doGoodStuff=true +# +# which will create both SpecialSink and PhoenixMetricsSink and register them +# as a MetricsSink, but Special sink will only see option1=value1 in its +# configuration, which similarly, the instantiated PhoenixMetricsSink will +# only see doGoodStuff=true in its configuration +# +# S
git commit: PHOENIX-1289 Drop index during upsert may abort RS (daniel meng + jyates)
Repository: phoenix Updated Branches: refs/heads/4.0 30d496bcc -> 7701ae2ce PHOENIX-1289 Drop index during upsert may abort RS (daniel meng + jyates) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/7701ae2c Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/7701ae2c Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/7701ae2c Branch: refs/heads/4.0 Commit: 7701ae2cebd0f49a059998357d25e531fab4d80a Parents: 30d496b Author: Jesse Yates Authored: Mon Oct 6 10:58:14 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:04:34 2014 -0700 -- .../end2end/index/DropIndexDuringUpsertIT.java | 177 ++ .../index/write/KillServerOnFailurePolicy.java | 2 +- .../index/PhoenixIndexFailurePolicy.java| 239 +++ 3 files changed, 316 insertions(+), 102 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/7701ae2c/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java new file mode 100644 index 000..4e44ec8 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL; +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR; +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR; +import static org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM; +import static org.apache.phoenix.util.TestUtil.LOCALHOST; +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; +import org.apache.phoenix.jdbc.PhoenixTestDriver; +import org.apache.phoenix.query.BaseTest; +import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.schema.PIndexState; +import org.apache.phoenix.schema.PTableType; +import org.apache.phoenix.util.PropertiesUtil; +import org.apache.phoenix.util.ReadOnlyProps; +import 
org.apache.phoenix.util.SchemaUtil; +import org.apache.phoenix.util.StringUtil; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import com.google.common.collect.Maps; + +@Category(NeedsOwnMiniClusterTest.class) +public class DropIndexDuringUpsertIT extends BaseTest { +private static final int NUM_SLAVES = 4; +private static String url; +private static PhoenixTestDriver driver; +private static HBaseTestingUtility util; + +private static ExecutorService service = Executors.newCachedThreadPool(); + +private static final String SCHEMA_NAME = "S"; +private static final String INDEX_TABLE_NAME = "I"; +private static final String DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "T"); +private static final String INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "I"); + +@Before +publi
git commit: PHOENIX-1289 Drop index during upsert may abort RS (daniel meng + jyates)
Repository: phoenix Updated Branches: refs/heads/master 909d97596 -> faeab9355 PHOENIX-1289 Drop index during upsert may abort RS (daniel meng + jyates) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/faeab935 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/faeab935 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/faeab935 Branch: refs/heads/master Commit: faeab935554404a042285a01127e9b88b8e3a47c Parents: 909d975 Author: Jesse Yates Authored: Mon Oct 6 10:58:14 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:04:58 2014 -0700 -- .../end2end/index/DropIndexDuringUpsertIT.java | 177 ++ .../index/write/KillServerOnFailurePolicy.java | 2 +- .../index/PhoenixIndexFailurePolicy.java| 239 +++ 3 files changed, 316 insertions(+), 102 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/faeab935/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java new file mode 100644 index 000..4e44ec8 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/DropIndexDuringUpsertIT.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL; +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR; +import static org.apache.phoenix.util.PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR; +import static org.apache.phoenix.util.PhoenixRuntime.PHOENIX_TEST_DRIVER_URL_PARAM; +import static org.apache.phoenix.util.TestUtil.LOCALHOST; +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; +import org.apache.phoenix.jdbc.PhoenixTestDriver; +import org.apache.phoenix.query.BaseTest; +import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.schema.PIndexState; +import org.apache.phoenix.schema.PTableType; +import org.apache.phoenix.util.PropertiesUtil; +import org.apache.phoenix.util.ReadOnlyProps; +import 
org.apache.phoenix.util.SchemaUtil; +import org.apache.phoenix.util.StringUtil; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import com.google.common.collect.Maps; + +@Category(NeedsOwnMiniClusterTest.class) +public class DropIndexDuringUpsertIT extends BaseTest { +private static final int NUM_SLAVES = 4; +private static String url; +private static PhoenixTestDriver driver; +private static HBaseTestingUtility util; + +private static ExecutorService service = Executors.newCachedThreadPool(); + +private static final String SCHEMA_NAME = "S"; +private static final String INDEX_TABLE_NAME = "I"; +private static final String DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "T"); +private static final String INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "I"); + +@Before +publi
git commit: PHOENIX-1305 create index throws NPE when dataTable has specified default column family (daniel meng)
Repository: phoenix Updated Branches: refs/heads/master faeab9355 -> ea0a502ce PHOENIX-1305 create index throws NPE when dataTable has specified default column family (daniel meng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ea0a502c Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ea0a502c Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ea0a502c Branch: refs/heads/master Commit: ea0a502ce133972b29c18a984063006d6dcd9691 Parents: faeab93 Author: Jesse Yates Authored: Mon Oct 6 11:42:13 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:42:20 2014 -0700 -- .../phoenix/end2end/index/MutableIndexIT.java | 50 .../phoenix/parse/CreateIndexStatement.java | 3 +- 2 files changed, 52 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea0a502c/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java index b093acb..8c9256d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java @@ -34,10 +34,16 @@ import java.util.Map; import java.util.Properties; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.phoenix.compile.ColumnResolver; +import org.apache.phoenix.compile.FromCompiler; import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT; import org.apache.phoenix.end2end.HBaseManagedTimeTest; import org.apache.phoenix.end2end.Shadower; +import org.apache.phoenix.jdbc.PhoenixConnection; +import org.apache.phoenix.parse.NamedTableNode; +import org.apache.phoenix.parse.TableName; import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.schema.PTable; import org.apache.phoenix.util.MetaDataUtil; import 
org.apache.phoenix.util.PropertiesUtil; import org.apache.phoenix.util.QueryUtil; @@ -63,6 +69,50 @@ public class MutableIndexIT extends BaseMutableIndexIT { props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true)); setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator())); } + +@Test +public void testIndexCreateWithoutOptions() throws Exception { +createIndexOnTableWithSpecifiedDefaultCF(false); +} + +@Test +public void testIndexCreateWithOptions() throws Exception { +createIndexOnTableWithSpecifiedDefaultCF(true); +} + +private void createIndexOnTableWithSpecifiedDefaultCF(boolean hasOptions) throws Exception { +String query; +ResultSet rs; + +Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); +Connection conn = DriverManager.getConnection(getUrl(), props); +conn.createStatement().execute( +"CREATE TABLE " + DATA_TABLE_FULL_NAME + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR) DEFAULT_COLUMN_FAMILY='A'"); +query = "SELECT * FROM " + DATA_TABLE_FULL_NAME; +rs = conn.createStatement().executeQuery(query); +assertFalse(rs.next()); + +String options = hasOptions ? 
"SALT_BUCKETS=10, MULTI_TENANT=true, IMMUTABLE_ROWS=true, DISABLE_WAL=true" : ""; +conn.createStatement().execute( +"CREATE INDEX " + INDEX_TABLE_NAME + " ON " + DATA_TABLE_FULL_NAME + " (v1) INCLUDE (v2) " + options); +query = "SELECT * FROM " + INDEX_TABLE_FULL_NAME; +rs = conn.createStatement().executeQuery(query); +assertFalse(rs.next()); + +//check options set correctly on index +TableName indexName = TableName.create(SCHEMA_NAME, INDEX_TABLE_NAME); +NamedTableNode indexNode = NamedTableNode.create(null, indexName, null); +ColumnResolver resolver = FromCompiler.getResolver(indexNode, conn.unwrap(PhoenixConnection.class)); +PTable indexTable = resolver.getTables().get(0).getTable(); +// Can't set IMMUTABLE_ROWS, MULTI_TENANT or DEFAULT_COLUMN_FAMILY_NAME on an index +assertNull(indexTable.getDefaultFamilyName()); +assertFalse(indexTable.isMultiTenant()); +assertFalse(indexTable.isImmutableRows()); +if(hasOptions) { +assertEquals(10, indexTable.getBucketNum().intValue()); +assertTrue(indexTable.isWALDisabled()); +} +} @Test public void testIndexWithNullableFixedWithCols() throws Exception { http://git-wip-us.apache.org/repos/asf/phoenix/blob/ea0a502c/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java ---
git commit: PHOENIX-1305 create index throws NPE when dataTable has specified default column family (daniel meng)
Repository: phoenix Updated Branches: refs/heads/4.0 7701ae2ce -> e9094d0a4 PHOENIX-1305 create index throws NPE when dataTable has specified default column family (daniel meng) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e9094d0a Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e9094d0a Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e9094d0a Branch: refs/heads/4.0 Commit: e9094d0a4cee18161197cffa57d2c9868c23b0a4 Parents: 7701ae2 Author: Jesse Yates Authored: Mon Oct 6 11:42:13 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:42:13 2014 -0700 -- .../phoenix/end2end/index/MutableIndexIT.java | 50 .../phoenix/parse/CreateIndexStatement.java | 3 +- 2 files changed, 52 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/e9094d0a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java index b093acb..8c9256d 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexIT.java @@ -34,10 +34,16 @@ import java.util.Map; import java.util.Properties; import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.phoenix.compile.ColumnResolver; +import org.apache.phoenix.compile.FromCompiler; import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT; import org.apache.phoenix.end2end.HBaseManagedTimeTest; import org.apache.phoenix.end2end.Shadower; +import org.apache.phoenix.jdbc.PhoenixConnection; +import org.apache.phoenix.parse.NamedTableNode; +import org.apache.phoenix.parse.TableName; import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.schema.PTable; import org.apache.phoenix.util.MetaDataUtil; import 
org.apache.phoenix.util.PropertiesUtil; import org.apache.phoenix.util.QueryUtil; @@ -63,6 +69,50 @@ public class MutableIndexIT extends BaseMutableIndexIT { props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true)); setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator())); } + +@Test +public void testIndexCreateWithoutOptions() throws Exception { +createIndexOnTableWithSpecifiedDefaultCF(false); +} + +@Test +public void testIndexCreateWithOptions() throws Exception { +createIndexOnTableWithSpecifiedDefaultCF(true); +} + +private void createIndexOnTableWithSpecifiedDefaultCF(boolean hasOptions) throws Exception { +String query; +ResultSet rs; + +Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES); +Connection conn = DriverManager.getConnection(getUrl(), props); +conn.createStatement().execute( +"CREATE TABLE " + DATA_TABLE_FULL_NAME + " (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR) DEFAULT_COLUMN_FAMILY='A'"); +query = "SELECT * FROM " + DATA_TABLE_FULL_NAME; +rs = conn.createStatement().executeQuery(query); +assertFalse(rs.next()); + +String options = hasOptions ? 
"SALT_BUCKETS=10, MULTI_TENANT=true, IMMUTABLE_ROWS=true, DISABLE_WAL=true" : ""; +conn.createStatement().execute( +"CREATE INDEX " + INDEX_TABLE_NAME + " ON " + DATA_TABLE_FULL_NAME + " (v1) INCLUDE (v2) " + options); +query = "SELECT * FROM " + INDEX_TABLE_FULL_NAME; +rs = conn.createStatement().executeQuery(query); +assertFalse(rs.next()); + +//check options set correctly on index +TableName indexName = TableName.create(SCHEMA_NAME, INDEX_TABLE_NAME); +NamedTableNode indexNode = NamedTableNode.create(null, indexName, null); +ColumnResolver resolver = FromCompiler.getResolver(indexNode, conn.unwrap(PhoenixConnection.class)); +PTable indexTable = resolver.getTables().get(0).getTable(); +// Can't set IMMUTABLE_ROWS, MULTI_TENANT or DEFAULT_COLUMN_FAMILY_NAME on an index +assertNull(indexTable.getDefaultFamilyName()); +assertFalse(indexTable.isMultiTenant()); +assertFalse(indexTable.isImmutableRows()); +if(hasOptions) { +assertEquals(10, indexTable.getBucketNum().intValue()); +assertTrue(indexTable.isWALDisabled()); +} +} @Test public void testIndexWithNullableFixedWithCols() throws Exception { http://git-wip-us.apache.org/repos/asf/phoenix/blob/e9094d0a/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java -
git commit: PHOENIX-1107 Support mutable indexes over replication
Repository: phoenix Updated Branches: refs/heads/4.0 e9094d0a4 -> 763f10f00 PHOENIX-1107 Support mutable indexes over replication Adding test to ensure that we still have indexes working over replication, rather than just relying on the fact that it 'just works'. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/763f10f0 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/763f10f0 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/763f10f0 Branch: refs/heads/4.0 Commit: 763f10f00ff5f26c1a2df9b19f430253ee331d90 Parents: e9094d0 Author: Jesse Yates Authored: Mon Oct 6 11:50:47 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:55:11 2014 -0700 -- .../index/MutableIndexReplicationIT.java| 280 +++ 1 file changed, 280 insertions(+) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/763f10f0/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java new file mode 100644 index 000..9981ed8 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import com.google.common.collect.Maps; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; +import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver; +import org.apache.phoenix.jdbc.PhoenixTestDriver; +import org.apache.phoenix.query.BaseTest; +import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.util.*; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.IOException; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; +import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; + +/** + * Test that we correctly replicate indexes over replication + * + * Code for setUp/teardown copied from org.apache.hadoop.hbase.replication.TestReplicationBase in + * HBase 0.98.5 + * + */ +@Category(NeedsOwnMiniClusterTest.class) +public class MutableIndexReplicationIT extends BaseTest { + 
+private static final Log LOG = LogFactory.getLog(MutableIndexReplicationIT.class); + +public static final String SCHEMA_NAME = ""; +public static final String DATA_TABLE_NAME = "T"; +public static final String INDEX_TABLE_NAME = "I"; +public static final String DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "T"); +public static final String INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "I"); +private static final long REPLICATION_WAIT_TIME_MILLIS = 1; + +protected static PhoenixTestDriver driver; +private static String URL; + +protected static Configuration conf1 = HBaseConfiguration.create(); +protected static Configuration conf2; + +protected static ZooKeeperWatcher zkw1; +protected static ZooKeeperWatcher zkw2; + +protected static ReplicationAdmin admin; + +protected static HBaseTestingUtility utility1; +protected static HBaseTestingUtility utility2; +protected static final int REPLICATION_RE
git commit: PHOENIX-1107 Support mutable indexes over replication
Repository: phoenix Updated Branches: refs/heads/master ea0a502ce -> 3b30690f6 PHOENIX-1107 Support mutable indexes over replication Adding test to ensure that we still have indexes working over replication, rather than just relying on the fact that it 'just works'. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3b30690f Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3b30690f Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3b30690f Branch: refs/heads/master Commit: 3b30690f6fdb11cc6c5d4bf53ee709b6e97196f7 Parents: ea0a502 Author: Jesse Yates Authored: Mon Oct 6 11:50:47 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 6 11:53:23 2014 -0700 -- .../index/MutableIndexReplicationIT.java| 280 +++ 1 file changed, 280 insertions(+) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/3b30690f/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java new file mode 100644 index 000..9981ed8 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/MutableIndexReplicationIT.java @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end.index; + +import com.google.common.collect.Maps; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.client.*; +import org.apache.hadoop.hbase.client.replication.ReplicationAdmin; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; +import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; +import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; +import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver; +import org.apache.phoenix.jdbc.PhoenixTestDriver; +import org.apache.phoenix.query.BaseTest; +import org.apache.phoenix.query.QueryServices; +import org.apache.phoenix.util.*; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.io.IOException; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES; +import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; + +/** + * Test that we correctly replicate indexes over replication + * + * Code for setUp/teardown copied from org.apache.hadoop.hbase.replication.TestReplicationBase in + * HBase 0.98.5 + * + */ +@Category(NeedsOwnMiniClusterTest.class) +public class MutableIndexReplicationIT extends BaseTest { + 
+private static final Log LOG = LogFactory.getLog(MutableIndexReplicationIT.class); + +public static final String SCHEMA_NAME = ""; +public static final String DATA_TABLE_NAME = "T"; +public static final String INDEX_TABLE_NAME = "I"; +public static final String DATA_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "T"); +public static final String INDEX_TABLE_FULL_NAME = SchemaUtil.getTableName(SCHEMA_NAME, "I"); +private static final long REPLICATION_WAIT_TIME_MILLIS = 1; + +protected static PhoenixTestDriver driver; +private static String URL; + +protected static Configuration conf1 = HBaseConfiguration.create(); +protected static Configuration conf2; + +protected static ZooKeeperWatcher zkw1; +protected static ZooKeeperWatcher zkw2; + +protected static ReplicationAdmin admin; + +protected static HBaseTestingUtility utility1; +protected static HBaseTestingUtility utility2; +protected static final int REPLICAT
[3/3] git commit: PHOENIX-1286 Remove hadoop2 compat modules
PHOENIX-1286 Remove hadoop2 compat modules There was some reflection and wrapping done in the metrics/tracing tools to support working with Hadoop1/2 (though hadoop1 support was never completed). Removing this extra code now that we don't want to support hadoop1 anymore Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/842f8a8a Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/842f8a8a Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/842f8a8a Branch: refs/heads/master Commit: 842f8a8a8b84790ef11cf0dfd8f64b19de800bea Parents: 4ed9ddb Author: Jesse Yates Authored: Mon Sep 22 15:00:00 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 27 13:54:58 2014 -0700 -- phoenix-assembly/pom.xml| 10 +- phoenix-core/pom.xml| 23 +- .../apache/phoenix/trace/BaseTracingTestIT.java | 112 --- .../phoenix/trace/DisableableMetricsWriter.java | 27 +- .../trace/Hadoop1TracingTestEnabler.java| 84 -- .../apache/phoenix/trace/PhoenixMetricImpl.java | 44 --- .../phoenix/trace/PhoenixMetricRecordImpl.java | 71 - .../trace/PhoenixTableMetricsWriterIT.java | 28 +- .../apache/phoenix/trace/PhoenixTagImpl.java| 22 +- .../phoenix/trace/PhoenixTraceReaderIT.java | 61 ++-- .../phoenix/trace/PhoenixTracingEndToEndIT.java | 59 ++-- .../apache/phoenix/trace/TracingTestUtil.java | 14 + .../org/apache/phoenix/hbase/index/Indexer.java | 4 +- .../org/apache/phoenix/metrics/MetricInfo.java | 51 .../org/apache/phoenix/metrics/Metrics.java | 66 .../apache/phoenix/trace/MetricsInfoImpl.java | 63 .../phoenix/trace/PhoenixMetricsSink.java | 298 +++ .../trace/PhoenixTableMetricsWriter.java| 278 - .../apache/phoenix/trace/TraceMetricSource.java | 188 .../org/apache/phoenix/trace/TraceReader.java | 12 +- .../org/apache/phoenix/trace/TracingUtils.java | 63 .../org/apache/phoenix/trace/util/Tracing.java | 5 +- .../metrics2/impl/ExposedMetricCounterLong.java | 36 +++ .../metrics2/impl/ExposedMetricsRecordImpl.java | 42 +++ 
.../metrics2/lib/ExposedMetricsInfoImpl.java| 34 +++ .../org/apache/phoenix/metrics/LoggingSink.java | 60 .../phoenix/trace/TraceMetricsSourceTest.java | 96 ++ phoenix-hadoop-compat/pom.xml | 89 -- .../org/apache/phoenix/metrics/MetricInfo.java | 51 .../org/apache/phoenix/metrics/Metrics.java | 80 - .../apache/phoenix/metrics/MetricsManager.java | 58 .../apache/phoenix/metrics/MetricsWriter.java | 31 -- .../phoenix/metrics/PhoenixAbstractMetric.java | 30 -- .../phoenix/metrics/PhoenixMetricTag.java | 27 -- .../phoenix/metrics/PhoenixMetricsRecord.java | 35 --- .../phoenix/trace/PhoenixSpanReceiver.java | 26 -- .../phoenix/trace/TestableMetricsWriter.java| 30 -- .../org/apache/phoenix/trace/TracingCompat.java | 89 -- .../org/apache/phoenix/metrics/LoggingSink.java | 56 .../phoenix/metrics/TracingTestCompat.java | 45 --- phoenix-hadoop2-compat/pom.xml | 77 - .../phoenix/metrics/MetricsManagerImpl.java | 71 - .../apache/phoenix/trace/MetricsInfoImpl.java | 63 .../phoenix/trace/PhoenixMetricsSink.java | 191 .../apache/phoenix/trace/TraceMetricSource.java | 197 .../org.apache.phoenix.metrics.MetricsManager | 1 - ...org.apache.phoenix.trace.PhoenixSpanReceiver | 1 - ...g.apache.phoenix.trace.TestableMetricsWriter | 1 - .../metrics2/impl/ExposedMetricCounterLong.java | 35 --- .../metrics2/impl/ExposedMetricsRecordImpl.java | 43 --- .../metrics2/lib/ExposedMetricsInfoImpl.java| 32 -- .../phoenix/trace/PhoenixMetricsWriterTest.java | 142 - .../phoenix/trace/TraceMetricsSourceTest.java | 96 -- .../org/apache/phoenix/trace/TracingTest.java | 34 --- pom.xml | 27 -- 55 files changed, 1156 insertions(+), 2353 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-assembly/pom.xml -- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index ea2bdb1..6c29f32 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -138,14 +138,6 @@ org.apache.phoenix - phoenix-hadoop-compat - - - org.apache.phoenix - phoenix-hadoop2-compat 
- - - org.apache.phoenix phoenix-flume @@ -153,4 +145,4 @@ phoenix-pig - + \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-core/pom.xml
[2/3] PHOENIX-1286 Remove hadoop2 compat modules
http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java new file mode 100644 index 000..6ae52d8 --- /dev/null +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.trace; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.cloudera.htrace.Span; + +/** + * Utilities for tracing + */ +public class TracingUtils { + +private static final Log LOG = LogFactory.getLog(TracingUtils.class); + +public static final String METRIC_SOURCE_KEY = "phoenix."; + +/** Set context to enable filtering */ +public static final String METRICS_CONTEXT = "tracing"; + +/** Marker metric to ensure that we register the tracing mbeans */ +public static final String METRICS_MARKER_CONTEXT = "marker"; + +public static void addAnnotation(Span span, String message, int value) { +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(Integer.toString(value))); +} + +public static Pair readAnnotation(byte[] key, byte[] value) { +return new Pair(new String(key), Bytes.toString(value)); +} + +/** + * @see #getTraceMetricName(String) + */ +public static final String getTraceMetricName(long traceId) { +return getTraceMetricName(Long.toString(traceId)); +} + +/** + * @param traceId unique id of the trace + * @return the name of the metric record that should be generated for a given trace + */ +public static final String getTraceMetricName(String traceId) { +return METRIC_SOURCE_KEY + traceId; +} +} http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java index d0677cf..b093b9c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java @@ -36,8 +36,7 @@ import org.apache.phoenix.call.CallWrapper; import org.apache.phoenix.jdbc.PhoenixConnection; import 
org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; -import org.apache.phoenix.trace.TracingCompat; -import org.apache.phoenix.util.StringUtil; +import org.apache.phoenix.trace.TraceMetricSource; import org.cloudera.htrace.Sampler; import org.cloudera.htrace.Span; import org.cloudera.htrace.Trace; @@ -313,7 +312,7 @@ public class Tracing { public synchronized static void addTraceMetricsSource() { try { if (!initialized) { -Trace.addReceiver(TracingCompat.newTraceMetricSource()); +Trace.addReceiver(new TraceMetricSource()); } } catch (RuntimeException e) { LOG.warn("Tracing will outputs will not be written to any metrics sink! No " http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java -- diff --git a/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java b/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java new file mode 100644 index 000..f4dfd74 --- /dev/null +++ b/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file
[1/3] PHOENIX-1286 Remove hadoop2 compat modules
Repository: phoenix Updated Branches: refs/heads/master 4ed9ddb8f -> 842f8a8a8 http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java deleted file mode 100644 index 3258e8a..000 --- a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.phoenix.trace; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.metrics2.MetricsCollector; -import org.apache.hadoop.metrics2.MetricsRecordBuilder; -import org.apache.hadoop.metrics2.MetricsTag; -import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import org.cloudera.htrace.Span; -import org.cloudera.htrace.impl.MilliSpan; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mockito; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * Test that the @{link TraceMetricSource} correctly handles different kinds of traces - */ -public class TraceMetricsSourceTest { - - @BeforeClass - public static void setup() throws Exception{ -DefaultMetricsSystem.setMiniClusterMode(true); - } - - /** - * For PHOENIX-1126, Phoenix originally assumed all the annotation values were integers, - * but HBase writes some strings as well, so we need to be able to handle that too - */ - @Test - public void testNonIntegerAnnotations(){ -Span span = getSpan(); -// make sure its less than the length of an integer -byte[] value = Bytes.toBytes("a"); -byte[] someInt = Bytes.toBytes(1); -assertTrue(someInt.length >value.length); - -// an annotation that is not an integer -span.addKVAnnotation(Bytes.toBytes("key"), value); - -// Create the sink and write the span -TraceMetricSource source = new TraceMetricSource(); -source.receiveSpan(span); - } - - @Test - public void testIntegerAnnotations(){ -Span span = getSpan(); - -// add annotation through the phoenix interfaces -TracingCompat.addAnnotation(span, "message", 10); - -TraceMetricSource source = new TraceMetricSource(); -source.receiveSpan(span); - } - - /** - * If the source does not write any metrics when there are no spans, i.e. when initialized, - * then the metrics system will discard the source, so it needs to always emit some metrics. 
- */ - @Test - public void testWritesInfoWhenNoSpans(){ -TraceMetricSource source = new TraceMetricSource(); -MetricsCollector collector = Mockito.mock(MetricsCollector.class); -MetricsRecordBuilder builder = Mockito.mock(MetricsRecordBuilder.class); -Mockito.when(collector.addRecord(Mockito.anyString())).thenReturn(builder); - -source.getMetrics(collector, true); - -// verify that we add a record and that the record has some info -Mockito.verify(collector).addRecord(Mockito.anyString()); -Mockito.verify(builder).add(Mockito.any(MetricsTag.class)); - } - - private Span getSpan(){ -return new MilliSpan("test span", 0, 1 , 2, "pid"); - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/842f8a8a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java deleted file mode 100644 index ffe6c82..000 --- a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownershi
[1/3] PHOENIX-1286 Remove hadoop2 compat modules
Repository: phoenix Updated Branches: refs/heads/4.0 b2c5ffa9c -> b48ca7b5c http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java deleted file mode 100644 index 3258e8a..000 --- a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TraceMetricsSourceTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.phoenix.trace; - -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.metrics2.MetricsCollector; -import org.apache.hadoop.metrics2.MetricsRecordBuilder; -import org.apache.hadoop.metrics2.MetricsTag; -import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import org.cloudera.htrace.Span; -import org.cloudera.htrace.impl.MilliSpan; -import org.junit.BeforeClass; -import org.junit.Test; -import org.mockito.Mockito; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * Test that the @{link TraceMetricSource} correctly handles different kinds of traces - */ -public class TraceMetricsSourceTest { - - @BeforeClass - public static void setup() throws Exception{ -DefaultMetricsSystem.setMiniClusterMode(true); - } - - /** - * For PHOENIX-1126, Phoenix originally assumed all the annotation values were integers, - * but HBase writes some strings as well, so we need to be able to handle that too - */ - @Test - public void testNonIntegerAnnotations(){ -Span span = getSpan(); -// make sure its less than the length of an integer -byte[] value = Bytes.toBytes("a"); -byte[] someInt = Bytes.toBytes(1); -assertTrue(someInt.length >value.length); - -// an annotation that is not an integer -span.addKVAnnotation(Bytes.toBytes("key"), value); - -// Create the sink and write the span -TraceMetricSource source = new TraceMetricSource(); -source.receiveSpan(span); - } - - @Test - public void testIntegerAnnotations(){ -Span span = getSpan(); - -// add annotation through the phoenix interfaces -TracingCompat.addAnnotation(span, "message", 10); - -TraceMetricSource source = new TraceMetricSource(); -source.receiveSpan(span); - } - - /** - * If the source does not write any metrics when there are no spans, i.e. when initialized, - * then the metrics system will discard the source, so it needs to always emit some metrics. 
- */ - @Test - public void testWritesInfoWhenNoSpans(){ -TraceMetricSource source = new TraceMetricSource(); -MetricsCollector collector = Mockito.mock(MetricsCollector.class); -MetricsRecordBuilder builder = Mockito.mock(MetricsRecordBuilder.class); -Mockito.when(collector.addRecord(Mockito.anyString())).thenReturn(builder); - -source.getMetrics(collector, true); - -// verify that we add a record and that the record has some info -Mockito.verify(collector).addRecord(Mockito.anyString()); -Mockito.verify(builder).add(Mockito.any(MetricsTag.class)); - } - - private Span getSpan(){ -return new MilliSpan("test span", 0, 1 , 2, "pid"); - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java -- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java deleted file mode 100644 index ffe6c82..000 --- a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/TracingTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership.
[3/3] git commit: PHOENIX-1286 Remove hadoop2 compat modules
PHOENIX-1286 Remove hadoop2 compat modules There was some reflection and wrapping done in the metrics/tracing tools to support working with Hadoop1/2 (though hadoop1 support was never completed). Removing this extra code now that we don't want to support hadoop1 anymore Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/b48ca7b5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/b48ca7b5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/b48ca7b5 Branch: refs/heads/4.0 Commit: b48ca7b5c3c97fe00c7e89978fb387d7013be320 Parents: b2c5ffa Author: Jesse Yates Authored: Mon Sep 22 15:00:00 2014 -0700 Committer: Jesse Yates Committed: Mon Oct 27 13:56:29 2014 -0700 -- phoenix-assembly/pom.xml| 10 +- phoenix-core/pom.xml| 23 +- .../apache/phoenix/trace/BaseTracingTestIT.java | 112 --- .../phoenix/trace/DisableableMetricsWriter.java | 27 +- .../trace/Hadoop1TracingTestEnabler.java| 84 -- .../apache/phoenix/trace/PhoenixMetricImpl.java | 44 --- .../phoenix/trace/PhoenixMetricRecordImpl.java | 71 - .../trace/PhoenixTableMetricsWriterIT.java | 28 +- .../apache/phoenix/trace/PhoenixTagImpl.java| 22 +- .../phoenix/trace/PhoenixTraceReaderIT.java | 61 ++-- .../phoenix/trace/PhoenixTracingEndToEndIT.java | 59 ++-- .../apache/phoenix/trace/TracingTestUtil.java | 14 + .../org/apache/phoenix/hbase/index/Indexer.java | 4 +- .../org/apache/phoenix/metrics/MetricInfo.java | 51 .../org/apache/phoenix/metrics/Metrics.java | 66 .../apache/phoenix/trace/MetricsInfoImpl.java | 63 .../phoenix/trace/PhoenixMetricsSink.java | 298 +++ .../trace/PhoenixTableMetricsWriter.java| 278 - .../apache/phoenix/trace/TraceMetricSource.java | 188 .../org/apache/phoenix/trace/TraceReader.java | 12 +- .../org/apache/phoenix/trace/TracingUtils.java | 63 .../org/apache/phoenix/trace/util/Tracing.java | 5 +- .../metrics2/impl/ExposedMetricCounterLong.java | 36 +++ .../metrics2/impl/ExposedMetricsRecordImpl.java | 42 +++ 
.../metrics2/lib/ExposedMetricsInfoImpl.java| 34 +++ .../org/apache/phoenix/metrics/LoggingSink.java | 60 .../phoenix/trace/TraceMetricsSourceTest.java | 96 ++ phoenix-hadoop-compat/pom.xml | 89 -- .../org/apache/phoenix/metrics/MetricInfo.java | 51 .../org/apache/phoenix/metrics/Metrics.java | 80 - .../apache/phoenix/metrics/MetricsManager.java | 58 .../apache/phoenix/metrics/MetricsWriter.java | 31 -- .../phoenix/metrics/PhoenixAbstractMetric.java | 30 -- .../phoenix/metrics/PhoenixMetricTag.java | 27 -- .../phoenix/metrics/PhoenixMetricsRecord.java | 35 --- .../phoenix/trace/PhoenixSpanReceiver.java | 26 -- .../phoenix/trace/TestableMetricsWriter.java| 30 -- .../org/apache/phoenix/trace/TracingCompat.java | 89 -- .../org/apache/phoenix/metrics/LoggingSink.java | 56 .../phoenix/metrics/TracingTestCompat.java | 45 --- phoenix-hadoop2-compat/pom.xml | 77 - .../phoenix/metrics/MetricsManagerImpl.java | 71 - .../apache/phoenix/trace/MetricsInfoImpl.java | 63 .../phoenix/trace/PhoenixMetricsSink.java | 191 .../apache/phoenix/trace/TraceMetricSource.java | 197 .../org.apache.phoenix.metrics.MetricsManager | 1 - ...org.apache.phoenix.trace.PhoenixSpanReceiver | 1 - ...g.apache.phoenix.trace.TestableMetricsWriter | 1 - .../metrics2/impl/ExposedMetricCounterLong.java | 35 --- .../metrics2/impl/ExposedMetricsRecordImpl.java | 43 --- .../metrics2/lib/ExposedMetricsInfoImpl.java| 32 -- .../phoenix/trace/PhoenixMetricsWriterTest.java | 142 - .../phoenix/trace/TraceMetricsSourceTest.java | 96 -- .../org/apache/phoenix/trace/TracingTest.java | 34 --- pom.xml | 27 -- 55 files changed, 1156 insertions(+), 2353 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-assembly/pom.xml -- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index fe02636..b0cbac4 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -138,14 +138,6 @@ org.apache.phoenix - phoenix-hadoop-compat - - - org.apache.phoenix - phoenix-hadoop2-compat 
- - - org.apache.phoenix phoenix-flume @@ -153,4 +145,4 @@ phoenix-pig - + \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-core/pom.xml ---
[2/3] PHOENIX-1286 Remove hadoop2 compat modules
http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java new file mode 100644 index 000..6ae52d8 --- /dev/null +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/TracingUtils.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.trace; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Pair; +import org.cloudera.htrace.Span; + +/** + * Utilities for tracing + */ +public class TracingUtils { + +private static final Log LOG = LogFactory.getLog(TracingUtils.class); + +public static final String METRIC_SOURCE_KEY = "phoenix."; + +/** Set context to enable filtering */ +public static final String METRICS_CONTEXT = "tracing"; + +/** Marker metric to ensure that we register the tracing mbeans */ +public static final String METRICS_MARKER_CONTEXT = "marker"; + +public static void addAnnotation(Span span, String message, int value) { +span.addKVAnnotation(message.getBytes(), Bytes.toBytes(Integer.toString(value))); +} + +public static Pair readAnnotation(byte[] key, byte[] value) { +return new Pair(new String(key), Bytes.toString(value)); +} + +/** + * @see #getTraceMetricName(String) + */ +public static final String getTraceMetricName(long traceId) { +return getTraceMetricName(Long.toString(traceId)); +} + +/** + * @param traceId unique id of the trace + * @return the name of the metric record that should be generated for a given trace + */ +public static final String getTraceMetricName(String traceId) { +return METRIC_SOURCE_KEY + traceId; +} +} http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java index d0677cf..b093b9c 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java @@ -36,8 +36,7 @@ import org.apache.phoenix.call.CallWrapper; import org.apache.phoenix.jdbc.PhoenixConnection; import 
org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; -import org.apache.phoenix.trace.TracingCompat; -import org.apache.phoenix.util.StringUtil; +import org.apache.phoenix.trace.TraceMetricSource; import org.cloudera.htrace.Sampler; import org.cloudera.htrace.Span; import org.cloudera.htrace.Trace; @@ -313,7 +312,7 @@ public class Tracing { public synchronized static void addTraceMetricsSource() { try { if (!initialized) { -Trace.addReceiver(TracingCompat.newTraceMetricSource()); +Trace.addReceiver(new TraceMetricSource()); } } catch (RuntimeException e) { LOG.warn("Tracing will outputs will not be written to any metrics sink! No " http://git-wip-us.apache.org/repos/asf/phoenix/blob/b48ca7b5/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java -- diff --git a/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java b/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java new file mode 100644 index 000..f4dfd74 --- /dev/null +++ b/phoenix-core/src/test/java/org/apache/hadoop/metrics2/impl/ExposedMetricCounterLong.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause
Repository: phoenix Updated Branches: refs/heads/master 0c1fd3ad5 -> 8ece81b55 PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause Setting the condition in the PhoenixMapReduceUtil, as well as some slight cleanup for duplicate code in setInput(). Adding a test that covers mapreduce with and without a condition. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8ece81b5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8ece81b5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8ece81b5 Branch: refs/heads/master Commit: 8ece81b5522df3e6bd9dfdb3112e101215bb49f1 Parents: 0c1fd3a Author: Jesse Yates Authored: Wed Feb 10 12:46:47 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 12:15:42 2016 -0800 -- .../org/apache/phoenix/end2end/MapReduceIT.java | 230 +++ .../mapreduce/util/PhoenixMapReduceUtil.java| 65 +++--- 2 files changed, 264 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/8ece81b5/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java new file mode 100644 index 000..f030701 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.mapreduce.lib.db.DBWritable; +import org.apache.phoenix.mapreduce.PhoenixOutputFormat; +import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil; +import org.apache.phoenix.mapreduce.util.PhoenixMapReduceUtil; +import org.apache.phoenix.schema.types.PDouble; +import org.apache.phoenix.schema.types.PhoenixArray; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.sql.*; + +import static org.junit.Assert.*; + +/** + * Test that our MapReduce basic tools work as expected + */ +public class MapReduceIT extends BaseHBaseManagedTimeIT { + +private static final String STOCK_TABLE_NAME = "stock"; +private static final String STOCK_STATS_TABLE_NAME = "stock_stats"; +private static final String STOCK_NAME = "STOCK_NAME"; +private static final String RECORDING_YEAR = "RECORDING_YEAR"; +private static final String RECORDINGS_QUARTER = "RECORDINGS_QUARTER"; +private static final String CREATE_STOCK_TABLE = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE_NAME + " ( " + +STOCK_NAME + " VARCHAR NOT NULL ," + RECORDING_YEAR + " INTEGER NOT NULL, " + RECORDINGS_QUARTER + +" DOUBLE array[] CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + " , " + RECORDING_YEAR + "))"; 
+ +private static final String MAX_RECORDING = "MAX_RECORDING"; +private static final String CREATE_STOCK_STATS_TABLE = +"CREATE TABLE IF NOT EXISTS " + STOCK_STATS_TABLE_NAME + "(" + STOCK_NAME + " VARCHAR NOT NULL , " ++ MAX_RECORDING + " DOUBLE CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + "))"; +private static final String UPSERT = "UPSERT into " + STOCK_TABLE_NAME + " values (?, ?, ?)"; + +@Before +public void setupTables() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +conn.createStatement().execute(CREATE_STOCK_TABLE); +conn.createStatement().execute(CREATE_STOCK_STATS_TABLE); +conn.commit(); +} + +@Test +public void testNoConditionsOnSelect() throws Exception { +final Configuration conf = getUtility().getConfiguration(); +
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.0 fd757a055 -> acca4129b PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause Setting the condition in the PhoenixMapReduceUtil, as well as some slight cleanup for duplicate code in setInput(). Adding a test that covers mapreduce with and without a condition. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/acca4129 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/acca4129 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/acca4129 Branch: refs/heads/4.x-HBase-1.0 Commit: acca4129b0b038e59ee047d88a8f445d4ebf1e6d Parents: fd757a0 Author: Jesse Yates Authored: Wed Feb 10 12:46:47 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 12:16:31 2016 -0800 -- .../org/apache/phoenix/end2end/MapReduceIT.java | 230 +++ .../mapreduce/util/PhoenixMapReduceUtil.java| 65 +++--- 2 files changed, 264 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/acca4129/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java new file mode 100644 index 000..f030701 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.mapreduce.lib.db.DBWritable; +import org.apache.phoenix.mapreduce.PhoenixOutputFormat; +import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil; +import org.apache.phoenix.mapreduce.util.PhoenixMapReduceUtil; +import org.apache.phoenix.schema.types.PDouble; +import org.apache.phoenix.schema.types.PhoenixArray; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.sql.*; + +import static org.junit.Assert.*; + +/** + * Test that our MapReduce basic tools work as expected + */ +public class MapReduceIT extends BaseHBaseManagedTimeIT { + +private static final String STOCK_TABLE_NAME = "stock"; +private static final String STOCK_STATS_TABLE_NAME = "stock_stats"; +private static final String STOCK_NAME = "STOCK_NAME"; +private static final String RECORDING_YEAR = "RECORDING_YEAR"; +private static final String RECORDINGS_QUARTER = "RECORDINGS_QUARTER"; +private static final String CREATE_STOCK_TABLE = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE_NAME + " ( " + +STOCK_NAME + " VARCHAR NOT NULL ," + RECORDING_YEAR + " INTEGER NOT NULL, " + RECORDINGS_QUARTER + +" DOUBLE array[] CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + " , " + RECORDING_YEAR + "))"; 
+ +private static final String MAX_RECORDING = "MAX_RECORDING"; +private static final String CREATE_STOCK_STATS_TABLE = +"CREATE TABLE IF NOT EXISTS " + STOCK_STATS_TABLE_NAME + "(" + STOCK_NAME + " VARCHAR NOT NULL , " ++ MAX_RECORDING + " DOUBLE CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + "))"; +private static final String UPSERT = "UPSERT into " + STOCK_TABLE_NAME + " values (?, ?, ?)"; + +@Before +public void setupTables() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +conn.createStatement().execute(CREATE_STOCK_TABLE); +conn.createStatement().execute(CREATE_STOCK_STATS_TABLE); +conn.commit(); +} + +@Test +public void testNoConditionsOnSelect() throws Exception { +final Configuration conf = getUtility().getConfiguration();
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 03b1dd229 -> 35b893d21 PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause Setting the condition in the PhoenixMapReduceUtil, as well as some slight cleanup for duplicate code in setInput(). Adding a test that covers mapreduce with and without a condition. Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/35b893d2 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/35b893d2 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/35b893d2 Branch: refs/heads/4.x-HBase-0.98 Commit: 35b893d219a02da2dd2588fbee1ecd8a33ed25cc Parents: 03b1dd2 Author: Jesse Yates Authored: Wed Feb 10 12:46:47 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 12:17:30 2016 -0800 -- .../org/apache/phoenix/end2end/MapReduceIT.java | 230 +++ .../mapreduce/util/PhoenixMapReduceUtil.java| 65 +++--- 2 files changed, 264 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/35b893d2/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java -- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java new file mode 100644 index 000..f030701 --- /dev/null +++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MapReduceIT.java @@ -0,0 +1,230 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.phoenix.end2end; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.DoubleWritable; +import org.apache.hadoop.io.NullWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.hadoop.mapreduce.Reducer; +import org.apache.hadoop.mapreduce.lib.db.DBWritable; +import org.apache.phoenix.mapreduce.PhoenixOutputFormat; +import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil; +import org.apache.phoenix.mapreduce.util.PhoenixMapReduceUtil; +import org.apache.phoenix.schema.types.PDouble; +import org.apache.phoenix.schema.types.PhoenixArray; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.sql.*; + +import static org.junit.Assert.*; + +/** + * Test that our MapReduce basic tools work as expected + */ +public class MapReduceIT extends BaseHBaseManagedTimeIT { + +private static final String STOCK_TABLE_NAME = "stock"; +private static final String STOCK_STATS_TABLE_NAME = "stock_stats"; +private static final String STOCK_NAME = "STOCK_NAME"; +private static final String RECORDING_YEAR = "RECORDING_YEAR"; +private static final String RECORDINGS_QUARTER = "RECORDINGS_QUARTER"; +private static final String CREATE_STOCK_TABLE = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE_NAME + " ( " + +STOCK_NAME + " VARCHAR NOT NULL ," + RECORDING_YEAR + " INTEGER NOT NULL, " + RECORDINGS_QUARTER + +" DOUBLE array[] CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + " , " + RECORDING_YEAR + "))"; 
+ +private static final String MAX_RECORDING = "MAX_RECORDING"; +private static final String CREATE_STOCK_STATS_TABLE = +"CREATE TABLE IF NOT EXISTS " + STOCK_STATS_TABLE_NAME + "(" + STOCK_NAME + " VARCHAR NOT NULL , " ++ MAX_RECORDING + " DOUBLE CONSTRAINT pk PRIMARY KEY (" + STOCK_NAME + "))"; +private static final String UPSERT = "UPSERT into " + STOCK_TABLE_NAME + " values (?, ?, ?)"; + +@Before +public void setupTables() throws Exception { +Connection conn = DriverManager.getConnection(getUrl()); +conn.createStatement().execute(CREATE_STOCK_TABLE); +conn.createStatement().execute(CREATE_STOCK_STATS_TABLE); +conn.commit(); +} + +@Test +public void testNoConditionsOnSelect() throws Exception { +final Configuration conf = getUtility().getConfiguration();
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 35b893d21 -> 470477e50 PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/470477e5 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/470477e5 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/470477e5 Branch: refs/heads/4.x-HBase-0.98 Commit: 470477e5053075e5ebf128bc3ae532e908a25067 Parents: 35b893d Author: Jesse Yates Authored: Fri Feb 12 15:46:11 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 16:14:41 2016 -0800 -- .../org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/470477e5/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java index 125c6a8..98f0364 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java @@ -58,8 +58,9 @@ public final class PhoenixMapReduceUtil { * @param inputQuery Select query. 
*/ public static void setInput(final Job job, final Class inputClass, final String tableName, final String inputQuery) { - final Configuration configuration = setInput(job, inputClass, tableName); - PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); +final Configuration configuration = setInput(job, inputClass, tableName); +PhoenixConfigurationUtil.setInputQuery(configuration, inputQuery); +PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); } private static Configuration setInput(final Job job, final Class inputClass, final String tableName){
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum)
Repository: phoenix Updated Branches: refs/heads/master 8ece81b55 -> edd94b28a PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/edd94b28 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/edd94b28 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/edd94b28 Branch: refs/heads/master Commit: edd94b28ab46877aa15e94713274516619fa43b1 Parents: 8ece81b Author: Jesse Yates Authored: Fri Feb 12 15:46:11 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 15:46:51 2016 -0800 -- .../org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/edd94b28/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java index 125c6a8..98f0364 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java @@ -58,8 +58,9 @@ public final class PhoenixMapReduceUtil { * @param inputQuery Select query. 
*/ public static void setInput(final Job job, final Class inputClass, final String tableName, final String inputQuery) { - final Configuration configuration = setInput(job, inputClass, tableName); - PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); +final Configuration configuration = setInput(job, inputClass, tableName); +PhoenixConfigurationUtil.setInputQuery(configuration, inputQuery); +PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); } private static Configuration setInput(final Job job, final Class inputClass, final String tableName){
phoenix git commit: PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum)
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.0 acca4129b -> ace996372 PHOENIX-2674 PhoenixMapReduceUtil#setInput doesn't honor condition clause (addendum) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ace99637 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ace99637 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ace99637 Branch: refs/heads/4.x-HBase-1.0 Commit: ace996372077e7eb84410cb9ddc764a5bec6af2f Parents: acca412 Author: Jesse Yates Authored: Fri Feb 12 15:46:11 2016 -0800 Committer: Jesse Yates Committed: Fri Feb 12 16:14:31 2016 -0800 -- .../org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/ace99637/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java -- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java index 125c6a8..98f0364 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/util/PhoenixMapReduceUtil.java @@ -58,8 +58,9 @@ public final class PhoenixMapReduceUtil { * @param inputQuery Select query. 
*/ public static void setInput(final Job job, final Class inputClass, final String tableName, final String inputQuery) { - final Configuration configuration = setInput(job, inputClass, tableName); - PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); +final Configuration configuration = setInput(job, inputClass, tableName); +PhoenixConfigurationUtil.setInputQuery(configuration, inputQuery); +PhoenixConfigurationUtil.setSchemaType(configuration, SchemaType.QUERY); } private static Configuration setInput(final Job job, final Class inputClass, final String tableName){