Repository: phoenix
Updated Branches:
  refs/heads/4.0 d1a8f49a8 -> 572724ef8
PHOENIX-1187 Enable tracing on server Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/572724ef Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/572724ef Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/572724ef Branch: refs/heads/4.0 Commit: 572724ef8bcab8e48eefec910a8654114e2bfebb Parents: d1a8f49 Author: Jesse Yates <jya...@apache.org> Authored: Tue Aug 19 15:25:57 2014 -0700 Committer: Jesse Yates <jya...@apache.org> Committed: Wed Aug 20 13:42:43 2014 -0700 ---------------------------------------------------------------------- phoenix-assembly/pom.xml | 2 +- phoenix-assembly/src/build/all.xml | 184 ------------------ .../components/all-common-dependencies.xml | 62 ++++++ .../src/build/components/all-common-files.xml | 71 +++++++ .../src/build/components/all-common-jars.xml | 74 +++++++ phoenix-assembly/src/build/hadoop-one-all.xml | 53 +++++ phoenix-assembly/src/build/hadoop-two-all.xml | 64 +++++++ .../coprocessor/MetaDataEndpointImpl.java | 11 ++ .../apache/phoenix/jdbc/PhoenixConnection.java | 9 +- .../trace/PhoenixTableMetricsWriter.java | 45 +++-- .../org/apache/phoenix/trace/util/Tracing.java | 22 +++ .../java/org/apache/phoenix/util/QueryUtil.java | 33 +++- .../org/apache/phoenix/metrics/Metrics.java | 41 ++++ .../org/apache/phoenix/trace/TracingCompat.java | 3 + .../bin/hadoop-metrics2-hbase.properties | 20 ++ .../bin/hadoop-metrics2-phoenix.properties | 53 +++++ .../phoenix/trace/PhoenixMetricsSink.java | 191 +++++++++++++++++++ .../phoenix/trace/PhoenixMetricsWriter.java | 176 ----------------- .../apache/phoenix/trace/TraceMetricSource.java | 13 +- .../phoenix/trace/PhoenixMetricsWriterTest.java | 2 +- pom.xml | 6 +- 21 files changed, 740 insertions(+), 395 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/pom.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml index b3c24d2..a34b09c 100644 --- a/phoenix-assembly/pom.xml +++ b/phoenix-assembly/pom.xml @@ -72,7 +72,7 @@ <tarLongFileMode>gnu</tarLongFileMode> <appendAssemblyId>false</appendAssemblyId> <descriptors> - <descriptor>src/build/all.xml</descriptor> + <descriptor>src/build/${assembly.descriptor}</descriptor> </descriptors> </configuration> </execution> http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/all.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/all.xml b/phoenix-assembly/src/build/all.xml deleted file mode 100644 index 8fa5884..0000000 --- a/phoenix-assembly/src/build/all.xml +++ /dev/null @@ -1,184 +0,0 @@ -<?xml version='1.0'?> -<!-- - - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. 
See the License for the - specific language governing permissions and limitations - under the License. - ---> - -<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" - xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> - <!--This 'all' id is not appended to the produced bundle because we do this: http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers --> - <id>all</id> - <formats> - <format>tar.gz</format> - </formats> - <includeBaseDirectory>true</includeBaseDirectory> - - <fileSets> - <!-- Top level directories --> - <fileSet> - <!--Get misc project files --> - <directory>${project.basedir}/..</directory> - <outputDirectory>/</outputDirectory> - <includes> - <include>*.txt</include> - </includes> - </fileSet> - <fileSet> - <directory>${project.basedir}/../bin</directory> - <outputDirectory>bin</outputDirectory> - <fileMode>0755</fileMode> - <directoryMode>0755</directoryMode> - <includes> - <include>*.py</include> - <include>*.sh</include> - </includes> - </fileSet> - <fileSet> - <directory>${project.basedir}/../bin</directory> - <outputDirectory>bin</outputDirectory> - <fileMode>0644</fileMode> - <directoryMode>0755</directoryMode> - <excludes> - <exclude>*.py/</exclude> - <exclude>*.sh/</exclude> - </excludes> - </fileSet> - <fileSet> - <directory>${project.basedir}/../dev</directory> - <fileMode>0644</fileMode> - <directoryMode>0755</directoryMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../docs</directory> - <fileMode>0644</fileMode> - <directoryMode>0755</directoryMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../examples</directory> - <fileMode>0644</fileMode> - <directoryMode>0755</directoryMode> - </fileSet> - <!-- Add the client & mapreduce jars. Expects the client jar packaging phase to already be run, - which is determined by specification order in the pom. --> - <fileSet> - <directory>target</directory> - <outputDirectory>/</outputDirectory> - <includes> - <include>phoenix-*-client.jar</include> - <include>phoenix-*-mapreduce.jar</include> - </includes> - </fileSet> - <!-- This is only necessary until maven fixes the intra-project dependency bug - in maven 3.0. Until then, we have to include the jars for sub-projects explicitly. - Otherwise, test jars are pulled in wrongly. 
- --> - <fileSet> - <directory>${project.basedir}/../phoenix-hadoop-compat/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <fileMode>0644</fileMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../phoenix-hadoop1-compat/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <fileMode>0644</fileMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../phoenix-hadoop2-compat/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <fileMode>0644</fileMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../phoenix-pig/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <fileMode>0644</fileMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../phoenix-flume/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <fileMode>0644</fileMode> - </fileSet> - <fileSet> - <directory>${project.basedir}/../phoenix-core/target/</directory> - <outputDirectory>lib</outputDirectory> - <includes> - <include>phoenix-*.jar</include> - </includes> - <excludes> - <exclude></exclude> - </excludes> - <fileMode>0644</fileMode> - </fileSet> - </fileSets> - - <!-- And add all of our dependencies --> - <dependencySets> - <dependencySet> - <!-- Unpack all the dependencies to class files, since java doesn't support - jar of jars for running --> - <unpack>false</unpack> - <outputDirectory>/lib</outputDirectory> - <includes> - <include>commons-configuration:commons-configuration</include> - <include>commons-io:commons-io</include> - <include>commons-lang:commons-lang</include> - <include>commons-logging:commons-logging</include> - <include>com.google.guava:guava</include> - <include>org.apache.hadoop:hadoop*</include> - <include>com.google.protobuf:protobuf-java</include> - <include>org.slf4j:slf4j-api</include> - <include>org.slf4j:slf4j-log4j12</include> - <include>org.apache.zookeeper:zookeeper</include> - <include>log4j:log4j</include> - <include>org.apache.hbase:hbase*</include> - <include>org.antlr:antlr</include> - <include>org.cloudera.htrace:htrace-core</include> - <include>io.netty:netty</include> - <include>commons-codec:commons-codec</include> - </includes> - </dependencySet> - <!-- Separate dependency set to just pull in the jackson stuff since its test - scoped and we only include 'runtime' scoped (which includes compile) dependencies --> - <dependencySet> - <unpack>false</unpack> - <scope>test</scope> - <!-- save these dependencies to the top-level --> - <outputDirectory>/lib</outputDirectory> - <includes> - <include>org.codehaus.jackson:jackson-core-asl</include> - <include>org.codehaus.jackson:jackson-mapper-asl</include> - </includes> - </dependencySet> - </dependencySets> -</assembly> http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/components/all-common-dependencies.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/components/all-common-dependencies.xml b/phoenix-assembly/src/build/components/all-common-dependencies.xml new file mode 100644 index 0000000..7d943f6 --- /dev/null +++ b/phoenix-assembly/src/build/components/all-common-dependencies.xml @@ -0,0 +1,62 @@ +<?xml version='1.0'?> +<!-- + + Licensed to 
the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> +<component> + <!-- All of our dependencies --> + <dependencySets> + <dependencySet> + <!-- Unpack all the dependencies to class files, since java doesn't support + jar of jars for running --> + <unpack>false</unpack> + <outputDirectory>/lib</outputDirectory> + <includes> + <include>commons-configuration:commons-configuration</include> + <include>commons-io:commons-io</include> + <include>commons-lang:commons-lang</include> + <include>commons-logging:commons-logging</include> + <include>com.google.guava:guava</include> + <include>org.apache.hadoop:hadoop*</include> + <include>com.google.protobuf:protobuf-java</include> + <include>org.slf4j:slf4j-api</include> + <include>org.slf4j:slf4j-log4j12</include> + <include>org.apache.zookeeper:zookeeper</include> + <include>log4j:log4j</include> + <include>org.apache.hbase:hbase*</include> + <include>org.antlr:antlr</include> + <include>org.cloudera.htrace:htrace-core</include> + <include>io.netty:netty</include> + <include>commons-codec:commons-codec</include> + </includes> + </dependencySet> + <!-- Separate dependency set to just pull in the jackson stuff since its test + scoped and we only include 'runtime' scoped (which includes compile) dependencies --> + <dependencySet> + <unpack>false</unpack> + <scope>test</scope> + <!-- save these dependencies to the top-level --> + <outputDirectory>/lib</outputDirectory> + <includes> + <include>org.codehaus.jackson:jackson-core-asl</include> + <include>org.codehaus.jackson:jackson-mapper-asl</include> + </includes> + </dependencySet> + </dependencySets> +</component> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/components/all-common-files.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/components/all-common-files.xml b/phoenix-assembly/src/build/components/all-common-files.xml new file mode 100644 index 0000000..60f9159 --- /dev/null +++ b/phoenix-assembly/src/build/components/all-common-files.xml @@ -0,0 +1,71 @@ +<?xml version='1.0'?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + +--> +<component> + <fileSets> + <!-- Top level directories --> + <fileSet> + <!--Get misc project files --> + <directory>${project.basedir}/..</directory> + <outputDirectory>/</outputDirectory> + <includes> + <include>*.txt</include> + </includes> + </fileSet> + <!-- Copy the executable files from the bin directory --> + <fileSet> + <directory>${project.basedir}/../bin</directory> + <outputDirectory>bin</outputDirectory> + <fileMode>0755</fileMode> + <directoryMode>0755</directoryMode> + <includes> + <include>*.py</include> + <include>*.sh</include> + </includes> + </fileSet> + <!-- Copy the non-executable files from the bin directory --> + <fileSet> + <directory>${project.basedir}/../bin</directory> + <outputDirectory>bin</outputDirectory> + <fileMode>0644</fileMode> + <directoryMode>0755</directoryMode> + <excludes> + <exclude>*.py/</exclude> + <exclude>*.sh/</exclude> + </excludes> + </fileSet> + <fileSet> + <directory>${project.basedir}/../dev</directory> + <fileMode>0644</fileMode> + <directoryMode>0755</directoryMode> + </fileSet> + <fileSet> + <directory>${project.basedir}/../docs</directory> + <fileMode>0644</fileMode> + <directoryMode>0755</directoryMode> + </fileSet> + <fileSet> + <directory>${project.basedir}/../examples</directory> + <fileMode>0644</fileMode> + <directoryMode>0755</directoryMode> + </fileSet> + </fileSets> +</component> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/components/all-common-jars.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/components/all-common-jars.xml b/phoenix-assembly/src/build/components/all-common-jars.xml new file mode 100644 index 0000000..3591a64 --- /dev/null +++ b/phoenix-assembly/src/build/components/all-common-jars.xml @@ -0,0 +1,74 @@ +<?xml version='1.0'?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> +<component> + <fileSets> + <!-- Add the client & mapreduce jars. Expects the client jar packaging phase to already be run, + which is determined by specification order in the pom. --> + <fileSet> + <directory>target</directory> + <outputDirectory>/</outputDirectory> + <includes> + <include>phoenix-*-client.jar</include> + <include>phoenix-*-mapreduce.jar</include> + </includes> + </fileSet> + <!-- This is only necessary until maven fixes the intra-project dependency bug + in maven 3.0. Until then, we have to include the jars for sub-projects explicitly. + Otherwise, test jars are pulled in wrongly. 
+ --> + <fileSet> + <directory>${project.basedir}/../phoenix-hadoop-compat/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <fileMode>0644</fileMode> + </fileSet> + <fileSet> + <directory>${project.basedir}/../phoenix-pig/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <fileMode>0644</fileMode> + </fileSet> + <fileSet> + <directory>${project.basedir}/../phoenix-flume/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <fileMode>0644</fileMode> + </fileSet> + <fileSet> + <directory>${project.basedir}/../phoenix-core/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <excludes> + <exclude></exclude> + </excludes> + <fileMode>0644</fileMode> + </fileSet> + </fileSets> +</component> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/hadoop-one-all.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/hadoop-one-all.xml b/phoenix-assembly/src/build/hadoop-one-all.xml new file mode 100644 index 0000000..9c067e0 --- /dev/null +++ b/phoenix-assembly/src/build/hadoop-one-all.xml @@ -0,0 +1,53 @@ +<?xml version='1.0'?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> + +<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> + <!--This 'all' id is not appended to the produced bundle because we do this: http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers --> + <id>all</id> + <formats> + <format>tar.gz</format> + </formats> + <includeBaseDirectory>true</includeBaseDirectory> + + <componentDescriptors> + <componentDescriptor>src/build/components/all-common-jars.xml</componentDescriptor> + <componentDescriptor>src/build/components/all-common-files.xml</componentDescriptor> + <componentDescriptor>src/build/components/all-common-dependencies.xml</componentDescriptor> + </componentDescriptors> + + <fileSets> + <!-- This is only necessary until maven fixes the intra-project dependency bug + in maven 3.0. Until then, we have to include the jars for sub-projects explicitly. + Otherwise, test jars are pulled in wrongly. 
+ --> + <fileSet> + <directory>${project.basedir}/../phoenix-hadoop1-compat/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <fileMode>0644</fileMode> + </fileSet> + </fileSets> +</assembly> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-assembly/src/build/hadoop-two-all.xml ---------------------------------------------------------------------- diff --git a/phoenix-assembly/src/build/hadoop-two-all.xml b/phoenix-assembly/src/build/hadoop-two-all.xml new file mode 100644 index 0000000..02bd854 --- /dev/null +++ b/phoenix-assembly/src/build/hadoop-two-all.xml @@ -0,0 +1,64 @@ +<?xml version='1.0'?> +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> + +<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd"> + <!--This 'all' id is not appended to the produced bundle because we do this: http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers --> + <id>all</id> + <formats> + <format>tar.gz</format> + </formats> + <includeBaseDirectory>true</includeBaseDirectory> + + <componentDescriptors> + <componentDescriptor>src/build/components/all-common-jars.xml</componentDescriptor> + <componentDescriptor>src/build/components/all-common-files.xml</componentDescriptor> + <componentDescriptor>src/build/components/all-common-dependencies.xml</componentDescriptor> + </componentDescriptors> + + <fileSets> + <!-- Include the bin files the hadoop2 compat bin directory --> + <fileSet> + <!--Get hadoop2 config files --> + <directory>${project.basedir}/../phoenix-hadoop2-compat/bin</directory> + <outputDirectory>bin</outputDirectory> + <fileMode>0655</fileMode> + <directoryMode>0755</directoryMode> + <includes> + <include>*.properties</include> + </includes> + </fileSet> + <!-- This is only necessary until maven fixes the intra-project dependency bug + in maven 3.0. Until then, we have to include the jars for sub-projects explicitly. + Otherwise, test jars are pulled in wrongly. 
+ --> + <fileSet> + <directory>${project.basedir}/../phoenix-hadoop2-compat/target/</directory> + <outputDirectory>lib</outputDirectory> + <includes> + <include>phoenix-*.jar</include> + </includes> + <fileMode>0644</fileMode> + </fileSet> + </fileSets> +</assembly> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java index 5b43a90..2f7b34f 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java @@ -63,6 +63,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; @@ -105,6 +107,7 @@ import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder; import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr; import org.apache.phoenix.hbase.index.util.IndexManagementUtil; import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData; +import org.apache.phoenix.metrics.Metrics; import org.apache.phoenix.protobuf.ProtobufUtil; import org.apache.phoenix.query.QueryConstants; import org.apache.phoenix.schema.AmbiguousColumnException; @@ -125,6 +128,7 @@ import org.apache.phoenix.schema.PTableImpl; import org.apache.phoenix.schema.PTableType; import org.apache.phoenix.schema.SortOrder; import org.apache.phoenix.schema.TableNotFoundException; +import org.apache.phoenix.trace.util.Tracing; import org.apache.phoenix.util.ByteUtil; import org.apache.phoenix.util.IndexUtil; import org.apache.phoenix.util.KeyValueUtil; @@ -270,6 +274,8 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso private RegionCoprocessorEnvironment env; + private static final Log LOG = LogFactory.getLog(MetaDataEndpointImpl.class); + /** * Stores a reference to the coprocessor environment provided by the * {@link org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost} from the region where this @@ -287,6 +293,11 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso } else { throw new CoprocessorException("Must be loaded on a table region!"); } + + LOG.info("Starting Tracing-Metrics Systems"); + // Start the phoenix trace collection + Tracing.addTraceMetricsSource(); + Metrics.ensureConfigured(); } @Override http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java index 650fedf..5a9dae9 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixConnection.java @@ -128,14 +128,7 @@ public class PhoenixConnection implements Connection, org.apache.phoenix.jdbc.Jd private boolean readOnly = false; static { - // add the phoenix span 
receiver so we can log the traces. We have a single trace - // source for the whole JVM - try { - Trace.addReceiver(TracingCompat.newTraceMetricSource()); - } catch (RuntimeException e) { - LOG.warn("Tracing will outputs will not be written to any metrics sink! No " - + "TraceMetricsSink found on the classpath", e); - } + Tracing.addTraceMetricsSource(); } private static Properties newPropsWithSCN(long scn, Properties props) { http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-core/src/main/java/org/apache/phoenix/trace/PhoenixTableMetricsWriter.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/PhoenixTableMetricsWriter.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/PhoenixTableMetricsWriter.java index f39db75..7fcb92d 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/trace/PhoenixTableMetricsWriter.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/PhoenixTableMetricsWriter.java @@ -32,6 +32,7 @@ import java.sql.PreparedStatement; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; +import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -44,6 +45,7 @@ import org.apache.phoenix.metrics.PhoenixMetricTag; import org.apache.phoenix.metrics.PhoenixMetricsRecord; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; +import org.apache.phoenix.trace.util.Tracing; import org.apache.phoenix.util.QueryUtil; import com.google.common.annotations.VisibleForTesting; @@ -78,20 +80,35 @@ public class PhoenixTableMetricsWriter implements MetricsWriter { @Override public void initialize() { - try { - // create the phoenix connection - Configuration conf = HBaseConfiguration.create(); - Connection conn = QueryUtil.getConnection(conf); - // enable bulk loading when we have enough data - conn.setAutoCommit(true); - - String tableName = - conf.get(QueryServices.TRACING_STATS_TABLE_NAME_ATTRIB, + LOG.info("Phoenix tracing writer started"); + } + + /** + * Initialize <tt>this</tt> only when we need it + */ + private void lazyInitialize() { + synchronized (this) { + if (this.conn != null) { + return; + } + try { + // create the phoenix connection + Properties props = new Properties(); + props.setProperty(QueryServices.TRACING_FREQ_ATTRIB, + Tracing.Frequency.NEVER.getKey()); + Configuration conf = HBaseConfiguration.create(); + Connection conn = QueryUtil.getConnection(props, conf); + // enable bulk loading when we have enough data + conn.setAutoCommit(true); + + String tableName = + conf.get(QueryServices.TRACING_STATS_TABLE_NAME_ATTRIB, QueryServicesOptions.DEFAULT_TRACING_STATS_TABLE_NAME); - initializeInternal(conn, tableName); - } catch (Exception e) { - throw new RuntimeException(e); + initializeInternal(conn, tableName); + } catch (Exception e) { + throw new RuntimeException(e); + } } } @@ -163,6 +180,10 @@ public class PhoenixTableMetricsWriter implements MetricsWriter { if (!record.name().startsWith(TracingCompat.METRIC_SOURCE_KEY)) { return; } + + // don't initialize until we actually have something to write + lazyInitialize(); + String stmt = "UPSERT INTO " + table + " ("; // drop it into the queue of things that should be written List<String> keys = new ArrayList<String>(); http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java 
---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java index e90d2c8..f1926f8 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/trace/util/Tracing.java @@ -31,6 +31,7 @@ import org.apache.phoenix.call.CallWrapper; import org.apache.phoenix.jdbc.PhoenixConnection; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; +import org.apache.phoenix.trace.TracingCompat; import org.cloudera.htrace.Sampler; import org.cloudera.htrace.Span; import org.cloudera.htrace.Trace; @@ -277,4 +278,25 @@ public class Tracing { scope.close(); } } + + /** + * Track if the tracing system has been initialized for phoenix + */ + private static boolean initialized = false; + + /** + * Add the phoenix span receiver so we can log the traces. We have a single trace source for the + * whole JVM + */ + public synchronized static void addTraceMetricsSource() { + try { + if (!initialized) { + Trace.addReceiver(TracingCompat.newTraceMetricSource()); + } + } catch (RuntimeException e) { + LOG.warn("Tracing will outputs will not be written to any metrics sink! No " + + "TraceMetricsSink found on the classpath", e); + } + initialized = true; + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java index db24686..da6b17a 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/util/QueryUtil.java @@ -26,6 +26,7 @@ import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; +import java.util.Properties; import javax.annotation.Nullable; @@ -33,6 +34,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.phoenix.jdbc.PhoenixDriver; @@ -181,16 +183,33 @@ public final class QueryUtil { } return buf.toString(); } + public static Connection getConnection(Configuration conf) throws ClassNotFoundException, - SQLException { - // read the hbase properties from the configuration - String server = ZKConfig.getZKQuorumServersString(conf); + SQLException { + return getConnection(new Properties(), conf); + } + + public static Connection getConnection(Properties props, Configuration conf) + throws ClassNotFoundException, SQLException { // make sure we load the phoenix driver Class.forName(PhoenixDriver.class.getName()); - int port = - conf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT, HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT); + + // read the hbase properties from the configuration + String server = ZKConfig.getZKQuorumServersString(conf); + int port; + // if it has a port, don't try to add one + try { + server = Addressing.parseHostname(server); + port = Addressing.parsePort(server); + } catch (IllegalArgumentException e) { + // port isn't set + port = + 
conf.getInt(HConstants.ZOOKEEPER_CLIENT_PORT, + HConstants.DEFAULT_ZOOKEPER_CLIENT_PORT); + } + String jdbcUrl = getUrl(server, port); LOG.info("Creating connection with the jdbc url:" + jdbcUrl); - return DriverManager.getConnection(jdbcUrl); - } + return DriverManager.getConnection(jdbcUrl, props); + } } http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/metrics/Metrics.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/metrics/Metrics.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/metrics/Metrics.java index e0667ab..5bc8545 100644 --- a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/metrics/Metrics.java +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/metrics/Metrics.java @@ -17,12 +17,38 @@ */ package org.apache.phoenix.metrics; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; public class Metrics { + private static final Log LOG = LogFactory.getLog(Metrics.class); + private static volatile MetricsManager manager; + private static boolean initialized; + + /** This must match the prefix that we are using in the hadoop-metrics2 config on the client */ + public static final String METRICS_SYSTEM_NAME = "phoenix"; + public static MetricsManager initialize() { + MetricsManager manager = Metrics.getManager(); + // if the jars aren't on the classpath, then we don't start the metrics system + if (manager == null) { + LOG.warn("Phoenix metrics could not be initialized - no MetricsManager found!"); + return null; + } + // only initialize the metrics system once + synchronized (Metrics.class) { + if (!initialized) { + LOG.info("Initializing metrics system: " + Metrics.METRICS_SYSTEM_NAME); + manager.initialize(Metrics.METRICS_SYSTEM_NAME); + initialized = true; + } + } + return manager; + } + /** * @return get the first {@link MetricsManager} on the classpath. Always returns the same object */ @@ -36,4 +62,19 @@ public class Metrics { } return manager; } + + private static volatile boolean sinkInitialized = false; + + /** + * Mark that the metrics/tracing sink has been initialized + */ + public static void markSinkInitialized() { + sinkInitialized = true; + } + + public static void ensureConfigured() { + if (!sinkInitialized) { + LOG.warn("Phoenix metrics2/tracing sink was not started. 
Should be it be?"); + } + } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java index ad4a58d..783bfd6 100644 --- a/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java +++ b/phoenix-hadoop-compat/src/main/java/org/apache/phoenix/trace/TracingCompat.java @@ -45,6 +45,9 @@ public class TracingCompat { /** Set context to enable filtering */ public static final String METRICS_CONTEXT = "tracing"; + /** Marker metric to ensure that we register the tracing mbeans */ + public static final String METRICS_MARKER_CONTEXT = "marker"; + public static void addAnnotation(Span span, String message, int value) { span.addKVAnnotation(message.getBytes(), Bytes.toBytes(value)); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/bin/hadoop-metrics2-hbase.properties ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/bin/hadoop-metrics2-hbase.properties b/phoenix-hadoop2-compat/bin/hadoop-metrics2-hbase.properties new file mode 100644 index 0000000..499ab6b --- /dev/null +++ b/phoenix-hadoop2-compat/bin/hadoop-metrics2-hbase.properties @@ -0,0 +1,20 @@ +# HBase Server Sink Configuration +################################# +# +# Configuration for the metrics2 system for the HBase RegionServers +# to enable phoenix trace collection on the HBase servers. +# +# See hadoop-metrics2-phoenix.properties for how these configurations +# are utilized. +# +# Either this file can be used in place of the standard +# hadoop-metrics2-hbase.properties file or the below +# properties should be added to the file of the same name on +# the HBase classpath (likely in the HBase conf/ folder) + +# ensure that we receive traces on the server +hbase.sink.tracing.class=org.apache.phoenix.trace.PhoenixMetricsSink +# Tell the sink where to write the metrics +hbase.sink.tracing.writer-class=org.apache.phoenix.trace.PhoenixTableMetricsWriter +# Only handle traces with a context of "tracing" +hbase.sink.tracing.context=tracing http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/bin/hadoop-metrics2-phoenix.properties ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/bin/hadoop-metrics2-phoenix.properties b/phoenix-hadoop2-compat/bin/hadoop-metrics2-phoenix.properties new file mode 100644 index 0000000..4a5794d --- /dev/null +++ b/phoenix-hadoop2-compat/bin/hadoop-metrics2-phoenix.properties @@ -0,0 +1,53 @@ +# Metrics properties for phoenix +#################################### +# +#There are two options with file names: +# 1. hadoop-metrics2-[prefix].properties +# 2. hadoop-metrics2.properties +# Either will be loaded by the metrics system (but not both). +# +# NOTE: The metrics system is only initialized once per JVM (but does ref-counting, so we can't +#shutdown and restart), so we only load the first prefix that we find. Generally, this will be +#phoenix (unless someone else registers first, but for many clients, there should only be one). 
+# +# Usually, you would use hadoop-metrics2-phoenix.properties, but we use the generic +# hadoop-metrics2.properties to ensure it these are loaded regardless of where we are running, +# assuming there isn't another config on the classpath. + +# When specifying sinks, the syntax to use is: +# [prefix].[source|sink].[instance].[options] +# The interesting thing to note is that [instance] can literally be anything (as long as its +# not zero-length). It is only there to differentiate the properties that are stored for +# objects of the same type (e.g. differentiating between two phoenix.sink objects). +# +#You could the following lines in your config +# +# phoenix.sink.thingA.class=com.your-company.SpecialSink +# phoenix.sink.thingA.option1=value1 +# +# and also +# +# phoenix.sink.thingB.class=org.apache.phoenix.trace.PhoenixMetricsSink +# phoenix.sink.thingB.doGoodStuff=true +# +# which will create both SpecialSink and PhoenixMetricsSink and register them +# as a MetricsSink, but Special sink will only see option1=value1 in its +# configuration, which similarly, the instantiated PhoenixMetricsSink will +# only see doGoodStuff=true in its configuration +# +# See javadoc of package-info.java for org.apache.hadoop.metrics2 for detail + +# Uncomment to NOT start MBeans +# *.source.start_mbeans=false + +# Sample from all the sources every 10 seconds +*.period=10 + +# Write Traces to Phoenix +########################## +# ensure that we receive traces on the server +phoenix.sink.tracing.class=org.apache.phoenix.trace.PhoenixMetricsSink +# Tell the sink where to write the metrics +phoenix.sink.tracing.writer-class=org.apache.phoenix.trace.PhoenixTableMetricsWriter +# Only handle traces with a context of "tracing" +phoenix.sink.tracing.context=tracing \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsSink.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsSink.java b/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsSink.java new file mode 100644 index 0000000..3de7da3 --- /dev/null +++ b/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsSink.java @@ -0,0 +1,191 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.phoenix.trace; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import javax.annotation.Nullable; + +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.SubsetConfiguration; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.metrics2.AbstractMetric; +import org.apache.hadoop.metrics2.MetricsRecord; +import org.apache.hadoop.metrics2.MetricsSink; +import org.apache.hadoop.metrics2.MetricsTag; +import org.apache.phoenix.metrics.Metrics; +import org.apache.phoenix.metrics.MetricsWriter; +import org.apache.phoenix.metrics.PhoenixAbstractMetric; +import org.apache.phoenix.metrics.PhoenixMetricTag; +import org.apache.phoenix.metrics.PhoenixMetricsRecord; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.base.Preconditions; +import com.google.common.collect.Iterators; + +/** + * Translate metrics from a Hadoop2 metrics2 metric to a generic PhoenixMetric that a + * {@link MetricsWriter} can then write out. + * <p> + * This class becomes unnecessary once we drop Hadoop1 support. + */ +public class PhoenixMetricsSink implements MetricsSink, TestableMetricsWriter { + + private static final Log LOG = LogFactory.getLog(PhoenixMetricsSink.class); + /** + * Metrics configuration key for the class that should be used for writing the output. + * <p> + * This would actually be set as: <code> + * phoenix.sink.<some instance name>.writer-class + * </code> Where <tt>some instance name</tt> is just any unique name, so properties can be + * differentiated + */ + public static final String PHOENIX_METRICS_WRITER_CLASS = "writer-class"; + + public static void setWriterClass(MetricsWriter writer, Configuration conf) { + conf.setProperty(PHOENIX_METRICS_WRITER_CLASS, writer.getClass().getName()); + } + + private MetricsWriter writer; + + public PhoenixMetricsSink() { + LOG.info("Writing tracing metrics to phoenix table"); + Metrics.markSinkInitialized(); + } + + @Override + public void init(SubsetConfiguration config) { + // instantiate the configured writer class + String clazz = config.getString(PHOENIX_METRICS_WRITER_CLASS); + LOG.info("Instantiating writer class: " + clazz); + this.writer = TracingCompat.initializeWriter(clazz); + Preconditions.checkNotNull(writer, "Could not correctly initialize metrics writer!"); + } + + @Override + @VisibleForTesting + public void setWriterForTesting(MetricsWriter writer) { + this.writer = writer; + } + + @Override + public void putMetrics(MetricsRecord record) { + writer.addMetrics(wrap(record)); + } + + @Override + public void flush() { + writer.flush(); + } + + /** + * Convert the passed record to a {@link PhoenixMetricsRecord} + * @param record to convert + * @return a generic {@link PhoenixMetricsRecord} that delegates to the record in all things + */ + private PhoenixMetricsRecord wrap(final MetricsRecord record) { + return new PhoenixMetricsRecord() { + + @Override + public String name() { + return record.name(); + } + + @Override + public String description() { + return record.description(); + } + + @Override + public Iterable<PhoenixAbstractMetric> metrics() { + final Iterable<AbstractMetric> iterable = record.metrics(); + return new Iterable<PhoenixAbstractMetric>(){ + + @Override + public Iterator<PhoenixAbstractMetric> iterator() { + final Iterator<AbstractMetric> iter = iterable.iterator(); + return 
Iterators.transform(iter, new Function<AbstractMetric, PhoenixAbstractMetric>() { + + @Override + @Nullable + public PhoenixAbstractMetric apply(@Nullable final AbstractMetric input) { + if (input == null) { + return null; + } + return new PhoenixAbstractMetric() { + + @Override + public Number value() { + return input.value(); + } + + @Override + public String getName() { + return input.name(); + } + + @Override + public String toString() { + return input.toString(); + } + }; + } + }); + } + }; + } + + @Override + public Collection<PhoenixMetricTag> tags() { + Collection<PhoenixMetricTag> tags = new ArrayList<PhoenixMetricTag>(); + Collection<MetricsTag> origTags = record.tags(); + for (final MetricsTag tag : origTags) { + tags.add(new PhoenixMetricTag() { + + @Override + public String name() { + return tag.name(); + } + + @Override + public String description() { + return tag.description(); + } + + @Override + public String value() { + return tag.value(); + } + + @Override + public String toString() { + return tag.toString(); + } + + }); + } + return tags; + } + + }; + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsWriter.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsWriter.java b/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsWriter.java deleted file mode 100644 index 03230ee..0000000 --- a/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/PhoenixMetricsWriter.java +++ /dev/null @@ -1,176 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.phoenix.trace; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; - -import javax.annotation.Nullable; - -import org.apache.commons.configuration.Configuration; -import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.hadoop.metrics2.AbstractMetric; -import org.apache.hadoop.metrics2.MetricsRecord; -import org.apache.hadoop.metrics2.MetricsSink; -import org.apache.hadoop.metrics2.MetricsTag; -import org.apache.phoenix.metrics.MetricsWriter; -import org.apache.phoenix.metrics.PhoenixAbstractMetric; -import org.apache.phoenix.metrics.PhoenixMetricTag; -import org.apache.phoenix.metrics.PhoenixMetricsRecord; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Function; -import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; - -/** - * Translate metrics from a Hadoop2 metrics2 metric to a generic PhoenixMetric that a - * {@link MetricsWriter} can then write out. 
- * <p> - * This class becomes unnecessary once we drop Hadoop1 support. - */ -public class PhoenixMetricsWriter implements MetricsSink, TestableMetricsWriter { - - /** - * Metrics configuration key for the class that should be used for writing the output - */ - public static final String PHOENIX_METRICS_WRITER_CLASS = "phoenix.sink.writer-class"; - - public static void setWriterClass(MetricsWriter writer, Configuration conf) { - conf.setProperty(PHOENIX_METRICS_WRITER_CLASS, writer.getClass().getName()); - } - - private MetricsWriter writer; - - @Override - public void init(SubsetConfiguration config) { - // instantiate the configured writer class - String clazz = config.getString(PHOENIX_METRICS_WRITER_CLASS); - this.writer = TracingCompat.initializeWriter(clazz); - Preconditions.checkNotNull(writer, "Could not correctly initialize metrics writer!"); - } - - @Override - @VisibleForTesting - public void setWriterForTesting(MetricsWriter writer) { - this.writer = writer; - } - - @Override - public void putMetrics(MetricsRecord record) { - writer.addMetrics(wrap(record)); - } - - @Override - public void flush() { - writer.flush(); - } - - /** - * Convert the passed record to a {@link PhoenixMetricsRecord} - * @param record to convert - * @return a generic {@link PhoenixMetricsRecord} that delegates to the record in all things - */ - private PhoenixMetricsRecord wrap(final MetricsRecord record) { - return new PhoenixMetricsRecord() { - - @Override - public String name() { - return record.name(); - } - - @Override - public String description() { - return record.description(); - } - - @Override - public Iterable<PhoenixAbstractMetric> metrics() { - final Iterable<AbstractMetric> iterable = record.metrics(); - return new Iterable<PhoenixAbstractMetric>(){ - - @Override - public Iterator<PhoenixAbstractMetric> iterator() { - final Iterator<AbstractMetric> iter = iterable.iterator(); - return Iterators.transform(iter, new Function<AbstractMetric, PhoenixAbstractMetric>() { - - @Override - @Nullable - public PhoenixAbstractMetric apply(@Nullable final AbstractMetric input) { - if (input == null) { - return null; - } - return new PhoenixAbstractMetric() { - - @Override - public Number value() { - return input.value(); - } - - @Override - public String getName() { - return input.name(); - } - - @Override - public String toString() { - return input.toString(); - } - }; - } - }); - } - }; - } - - @Override - public Collection<PhoenixMetricTag> tags() { - Collection<PhoenixMetricTag> tags = new ArrayList<PhoenixMetricTag>(); - Collection<MetricsTag> origTags = record.tags(); - for (final MetricsTag tag : origTags) { - tags.add(new PhoenixMetricTag() { - - @Override - public String name() { - return tag.name(); - } - - @Override - public String description() { - return tag.description(); - } - - @Override - public String value() { - return tag.value(); - } - - @Override - public String toString() { - return tag.toString(); - } - - }); - } - return tags; - } - - }; - } -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/TraceMetricSource.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/TraceMetricSource.java b/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/TraceMetricSource.java index 5876771..5374695 100644 --- 
a/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/TraceMetricSource.java +++ b/phoenix-hadoop2-compat/src/main/java/org/apache/phoenix/trace/TraceMetricSource.java @@ -87,15 +87,12 @@ public class TraceMetricSource implements PhoenixSpanReceiver, MetricsSource { private static final String EMPTY_STRING = ""; - /** This must match the prefix that we are using in the hadoop-metrics2 config */ - private static final String METRICS_SYSTEM_NAME = "phoenix"; private static final String CONTEXT = "tracing"; private List<Metric> spans = new ArrayList<Metric>(); public TraceMetricSource() { - MetricsManager manager = Metrics.getManager(); - manager.initialize(METRICS_SYSTEM_NAME); + MetricsManager manager = Metrics.initialize(); // Register this instance. // For right now, we ignore the MBean registration issues that show up in DEBUG logs. Basically, @@ -135,6 +132,14 @@ public class TraceMetricSource implements PhoenixSpanReceiver, MetricsSource { @Override public void getMetrics(MetricsCollector collector, boolean all) { + // add a marker record so we know how many spans are used + // this is also necessary to ensure that we register the metrics source as an MBean (avoiding a + // runtime warning) + MetricsRecordBuilder marker = collector.addRecord(TracingCompat.METRICS_MARKER_CONTEXT); + marker.add(new MetricsTag((MetricsInfo) new MetricsInfoImpl("stat", "num spans"), Integer + .toString(spans.size()))); + + // actually convert the known spans into metric records as well synchronized (this) { for (Metric span : spans) { MetricsRecordBuilder builder = collector.addRecord(new MetricsInfoImpl(TracingCompat http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/PhoenixMetricsWriterTest.java ---------------------------------------------------------------------- diff --git a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/PhoenixMetricsWriterTest.java b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/PhoenixMetricsWriterTest.java index 0c8bbab..f865723 100644 --- a/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/PhoenixMetricsWriterTest.java +++ b/phoenix-hadoop2-compat/src/test/java/org/apache/phoenix/trace/PhoenixMetricsWriterTest.java @@ -52,7 +52,7 @@ public class PhoenixMetricsWriterTest { MetricsWriter mockSink = Mockito.mock(MetricsWriter.class); // writer that will translate to the sink (specific to hadoop version used) - PhoenixMetricsWriter writer = new PhoenixMetricsWriter(); + PhoenixMetricsSink writer = new PhoenixMetricsSink(); writer.setWriterForTesting(mockSink); // create a simple metrics record http://git-wip-us.apache.org/repos/asf/phoenix/blob/572724ef/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index 492375d..9d918f0 100644 --- a/pom.xml +++ b/pom.xml @@ -564,7 +564,8 @@ <properties> <hadoop.version>${hadoop-one.version}</hadoop.version> <compat.module>phoenix-hadoop1-compat</compat.module> - <assembly.file>src/main/assembly/hadoop-one-compat.xml</assembly.file> + <!-- name of the descriptor file for all the components (including hadoop1) --> + <assembly.descriptor>hadoop-one-all.xml</assembly.descriptor> </properties> <dependencyManagement> <dependencies> @@ -665,7 +666,8 @@ <properties> <hadoop.version>${hadoop-two.version}</hadoop.version> <compat.module>phoenix-hadoop2-compat</compat.module> - <assembly.file>src/main/assembly/hadoop-two-compat.xml</assembly.file> + <!-- name of the 
descriptor file for all the components (including hadoop2) --> + <assembly.descriptor>hadoop-two-all.xml</assembly.descriptor> </properties> <dependencyManagement> <dependencies>
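
For reference, a minimal client-side sketch of the new QueryUtil.getConnection(Properties, Configuration) overload added in this commit, mirroring the lazyInitialize() logic introduced in PhoenixTableMetricsWriter (tracing is set to NEVER so the writer's own upserts are not themselves traced). The wrapper class and method names below are hypothetical; the Phoenix classes, constants, and method signatures are the ones that appear in the diff above.

    import java.sql.Connection;
    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.phoenix.query.QueryServices;
    import org.apache.phoenix.trace.util.Tracing;
    import org.apache.phoenix.util.QueryUtil;

    // Hypothetical helper class, shown only to illustrate the new overload.
    public class TracingConnectionSketch {
        public static Connection open() throws Exception {
            Properties props = new Properties();
            // Disable tracing for this connection so trace-metric writes
            // do not generate further traces (as lazyInitialize() does).
            props.setProperty(QueryServices.TRACING_FREQ_ATTRIB,
                    Tracing.Frequency.NEVER.getKey());
            // Pull the HBase/ZooKeeper quorum settings from the local config.
            Configuration conf = HBaseConfiguration.create();
            Connection conn = QueryUtil.getConnection(props, conf);
            // Enable auto-commit so batched metric rows are flushed as they arrive.
            conn.setAutoCommit(true);
            return conn;
        }
    }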