http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationFinishDataPBImpl.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationFinishDataPBImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationFinishDataPBImpl.java new file mode 100644 index 0000000..ba7e7f0 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationFinishDataPBImpl.java @@ -0,0 +1,226 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.records.impl.pb; + +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; +import org.apache.hadoop.yarn.api.records.YarnApplicationState; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; +import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto; +import org.apache.ambari.metrics.records.ApplicationFinishData; + +import com.google.protobuf.TextFormat; + +public class ApplicationFinishDataPBImpl extends ApplicationFinishData { + + ApplicationFinishDataProto proto = ApplicationFinishDataProto + .getDefaultInstance(); + ApplicationFinishDataProto.Builder builder = null; + boolean viaProto = false; + + private ApplicationId applicationId; + + public ApplicationFinishDataPBImpl() { + builder = ApplicationFinishDataProto.newBuilder(); + } + + public ApplicationFinishDataPBImpl(ApplicationFinishDataProto proto) { + this.proto = proto; + viaProto = true; + } + + @Override + public ApplicationId getApplicationId() { + if (this.applicationId != null) { + return this.applicationId; + } + ApplicationFinishDataProtoOrBuilder p = viaProto ? 
proto : builder; + if (!p.hasApplicationId()) { + return null; + } + this.applicationId = convertFromProtoFormat(p.getApplicationId()); + return this.applicationId; + } + + @Override + public void setApplicationId(ApplicationId applicationId) { + maybeInitBuilder(); + if (applicationId == null) { + builder.clearApplicationId(); + } + this.applicationId = applicationId; + } + + @Override + public long getFinishTime() { + ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; + return p.getFinishTime(); + } + + @Override + public void setFinishTime(long finishTime) { + maybeInitBuilder(); + builder.setFinishTime(finishTime); + } + + @Override + public String getDiagnosticsInfo() { + ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasDiagnosticsInfo()) { + return null; + } + return p.getDiagnosticsInfo(); + } + + @Override + public void setDiagnosticsInfo(String diagnosticsInfo) { + maybeInitBuilder(); + if (diagnosticsInfo == null) { + builder.clearDiagnosticsInfo(); + return; + } + builder.setDiagnosticsInfo(diagnosticsInfo); + } + + @Override + public FinalApplicationStatus getFinalApplicationStatus() { + ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasFinalApplicationStatus()) { + return null; + } + return convertFromProtoFormat(p.getFinalApplicationStatus()); + } + + @Override + public void setFinalApplicationStatus( + FinalApplicationStatus finalApplicationStatus) { + maybeInitBuilder(); + if (finalApplicationStatus == null) { + builder.clearFinalApplicationStatus(); + return; + } + builder + .setFinalApplicationStatus(convertToProtoFormat(finalApplicationStatus)); + } + + @Override + public YarnApplicationState getYarnApplicationState() { + ApplicationFinishDataProtoOrBuilder p = viaProto ? 
proto : builder; + if (!p.hasYarnApplicationState()) { + return null; + } + return convertFromProtoFormat(p.getYarnApplicationState()); + } + + @Override + public void setYarnApplicationState(YarnApplicationState state) { + maybeInitBuilder(); + if (state == null) { + builder.clearYarnApplicationState(); + return; + } + builder.setYarnApplicationState(convertToProtoFormat(state)); + } + + public ApplicationFinishDataProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } + + private void mergeLocalToBuilder() { + if (this.applicationId != null + && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( + builder.getApplicationId())) { + builder.setApplicationId(convertToProtoFormat(this.applicationId)); + } + } + + private void mergeLocalToProto() { + if (viaProto) { + maybeInitBuilder(); + } + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = ApplicationFinishDataProto.newBuilder(proto); + } + viaProto = false; + } + + private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) { + return ((ApplicationIdPBImpl) applicationId).getProto(); + } + + private ApplicationIdPBImpl convertFromProtoFormat( + ApplicationIdProto applicationId) { + return new ApplicationIdPBImpl(applicationId); + } + + private FinalApplicationStatus convertFromProtoFormat( + FinalApplicationStatusProto finalApplicationStatus) { + return 
ProtoUtils.convertFromProtoFormat(finalApplicationStatus); + } + + private FinalApplicationStatusProto convertToProtoFormat( + FinalApplicationStatus finalApplicationStatus) { + return ProtoUtils.convertToProtoFormat(finalApplicationStatus); + } + + private YarnApplicationStateProto convertToProtoFormat( + YarnApplicationState state) { + return ProtoUtils.convertToProtoFormat(state); + } + + private YarnApplicationState convertFromProtoFormat( + YarnApplicationStateProto yarnApplicationState) { + return ProtoUtils.convertFromProtoFormat(yarnApplicationState); + } + +}
http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationStartDataPBImpl.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationStartDataPBImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationStartDataPBImpl.java new file mode 100644 index 0000000..f47ab00 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ApplicationStartDataPBImpl.java @@ -0,0 +1,229 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.records.impl.pb; + +import org.apache.ambari.metrics.records.ApplicationStartData; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; + +import com.google.protobuf.TextFormat; + +public class ApplicationStartDataPBImpl extends ApplicationStartData { + + ApplicationStartDataProto proto = ApplicationStartDataProto + .getDefaultInstance(); + ApplicationStartDataProto.Builder builder = null; + boolean viaProto = false; + + private ApplicationId applicationId; + + public ApplicationStartDataPBImpl() { + builder = ApplicationStartDataProto.newBuilder(); + } + + public ApplicationStartDataPBImpl(ApplicationStartDataProto proto) { + this.proto = proto; + viaProto = true; + } + + @Override + public ApplicationId getApplicationId() { + if (this.applicationId != null) { + return this.applicationId; + } + ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasApplicationId()) { + return null; + } + this.applicationId = convertFromProtoFormat(p.getApplicationId()); + return this.applicationId; + } + + @Override + public void setApplicationId(ApplicationId applicationId) { + maybeInitBuilder(); + if (applicationId == null) { + builder.clearApplicationId(); + } + this.applicationId = applicationId; + } + + @Override + public String getApplicationName() { + ApplicationStartDataProtoOrBuilder p = viaProto ? 
proto : builder; + if (!p.hasApplicationName()) { + return null; + } + return p.getApplicationName(); + } + + @Override + public void setApplicationName(String applicationName) { + maybeInitBuilder(); + if (applicationName == null) { + builder.clearApplicationName(); + return; + } + builder.setApplicationName(applicationName); + } + + @Override + public String getApplicationType() { + ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasApplicationType()) { + return null; + } + return p.getApplicationType(); + } + + @Override + public void setApplicationType(String applicationType) { + maybeInitBuilder(); + if (applicationType == null) { + builder.clearApplicationType(); + return; + } + builder.setApplicationType(applicationType); + } + + @Override + public String getUser() { + ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasUser()) { + return null; + } + return p.getUser(); + } + + @Override + public void setUser(String user) { + maybeInitBuilder(); + if (user == null) { + builder.clearUser(); + return; + } + builder.setUser(user); + } + + @Override + public String getQueue() { + ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasQueue()) { + return null; + } + return p.getQueue(); + } + + @Override + public void setQueue(String queue) { + maybeInitBuilder(); + if (queue == null) { + builder.clearQueue(); + return; + } + builder.setQueue(queue); + } + + @Override + public long getSubmitTime() { + ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder; + return p.getSubmitTime(); + } + + @Override + public void setSubmitTime(long submitTime) { + maybeInitBuilder(); + builder.setSubmitTime(submitTime); + } + + @Override + public long getStartTime() { + ApplicationStartDataProtoOrBuilder p = viaProto ? 
proto : builder; + return p.getStartTime(); + } + + @Override + public void setStartTime(long startTime) { + maybeInitBuilder(); + builder.setStartTime(startTime); + } + + public ApplicationStartDataProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } + + private void mergeLocalToBuilder() { + if (this.applicationId != null + && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( + builder.getApplicationId())) { + builder.setApplicationId(convertToProtoFormat(this.applicationId)); + } + } + + private void mergeLocalToProto() { + if (viaProto) { + maybeInitBuilder(); + } + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = ApplicationStartDataProto.newBuilder(proto); + } + viaProto = false; + } + + private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) { + return ((ApplicationIdPBImpl) applicationId).getProto(); + } + + private ApplicationIdPBImpl convertFromProtoFormat( + ApplicationIdProto applicationId) { + return new ApplicationIdPBImpl(applicationId); + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerFinishDataPBImpl.java ---------------------------------------------------------------------- diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerFinishDataPBImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerFinishDataPBImpl.java
new file mode 100644
index 0000000..e9aeb92
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerFinishDataPBImpl.java
@@ -0,0 +1,204 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ambari.metrics.records.impl.pb;

import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
import org.apache.ambari.metrics.records.ContainerFinishData;

import com.google.protobuf.TextFormat;

/**
 * Protocol-buffer-backed implementation of {@link ContainerFinishData}.
 * <p>
 * Standard YARN PBImpl pattern: reads come from either the immutable
 * {@code proto} or the mutable {@code builder} depending on {@code viaProto};
 * writes force a builder via {@link #maybeInitBuilder()}. The cached
 * {@link ContainerId} is merged back into the builder when
 * {@link #getProto()} is called. Not thread-safe.
 */
public class ContainerFinishDataPBImpl extends ContainerFinishData {

  ContainerFinishDataProto proto = ContainerFinishDataProto
      .getDefaultInstance();
  ContainerFinishDataProto.Builder builder = null;
  boolean viaProto = false;

  // Lazily converted, cached view of the proto's container id.
  private ContainerId containerId;

  public ContainerFinishDataPBImpl() {
    builder = ContainerFinishDataProto.newBuilder();
  }

  public ContainerFinishDataPBImpl(ContainerFinishDataProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  /**
   * Returns the container id, converting from the proto form on first access
   * and caching the result; {@code null} if the field is unset.
   */
  @Override
  public ContainerId getContainerId() {
    if (this.containerId != null) {
      return this.containerId;
    }
    ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasContainerId()) {
      return null;
    }
    this.containerId = convertFromProtoFormat(p.getContainerId());
    return this.containerId;
  }

  /**
   * Sets the container id. A {@code null} argument clears the proto field;
   * the local cache is updated either way.
   */
  @Override
  public void setContainerId(ContainerId containerId) {
    maybeInitBuilder();
    if (containerId == null) {
      builder.clearContainerId();
    }
    this.containerId = containerId;
  }

  /** Returns the finish time (proto default 0 when unset). */
  @Override
  public long getFinishTime() {
    ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    return p.getFinishTime();
  }

  @Override
  public void setFinishTime(long finishTime) {
    maybeInitBuilder();
    builder.setFinishTime(finishTime);
  }

  /** Returns the diagnostics string, or {@code null} when unset. */
  @Override
  public String getDiagnosticsInfo() {
    ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasDiagnosticsInfo()) {
      return null;
    }
    return p.getDiagnosticsInfo();
  }

  @Override
  public void setDiagnosticsInfo(String diagnosticsInfo) {
    maybeInitBuilder();
    if (diagnosticsInfo == null) {
      builder.clearDiagnosticsInfo();
      return;
    }
    builder.setDiagnosticsInfo(diagnosticsInfo);
  }

  /** Returns the container exit status (proto default 0 when unset). */
  @Override
  public int getContainerExitStatus() {
    ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    return p.getContainerExitStatus();
  }

  /** Returns the container state, or {@code null} when unset. */
  @Override
  public ContainerState getContainerState() {
    ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasContainerState()) {
      return null;
    }
    return convertFromProtoFormat(p.getContainerState());
  }

  @Override
  public void setContainerState(ContainerState state) {
    maybeInitBuilder();
    if (state == null) {
      builder.clearContainerState();
      return;
    }
    builder.setContainerState(convertToProtoFormat(state));
  }

  @Override
  public void setContainerExitStatus(int containerExitStatus) {
    maybeInitBuilder();
    builder.setContainerExitStatus(containerExitStatus);
  }

  /**
   * Merges any locally cached values into the builder and returns the
   * canonical, immutable proto representation of this record.
   */
  public ContainerFinishDataProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  /**
   * Equality is defined by the canonical proto form.
   * <p>
   * FIX: the previous
   * {@code other.getClass().isAssignableFrom(this.getClass())} check let a
   * strict-superclass instance (e.g. {@code Object}) reach
   * {@code this.getClass().cast(other)} and throw
   * {@link ClassCastException}. An exact class match returns {@code false}
   * instead, as the {@code equals} contract requires.
   */
  @Override
  public boolean equals(Object other) {
    if (other == null || other.getClass() != this.getClass()) {
      return false;
    }
    return this.getProto().equals(this.getClass().cast(other).getProto());
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  // Pushes the cached ContainerId into the builder if it differs from what
  // the builder already holds.
  private void mergeLocalToBuilder() {
    if (this.containerId != null
        && !((ContainerIdPBImpl) this.containerId).getProto().equals(
            builder.getContainerId())) {
      builder.setContainerId(convertToProtoFormat(this.containerId));
    }
  }

  // Rebuilds the immutable proto from the builder plus any cached locals.
  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  // Ensures a mutable builder exists, seeding it from the current proto when
  // we were previously in read-only (viaProto) mode.
  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = ContainerFinishDataProto.newBuilder(proto);
    }
    viaProto = false;
  }

  private ContainerIdProto convertToProtoFormat(ContainerId containerId) {
    return ((ContainerIdPBImpl) containerId).getProto();
  }

  private ContainerIdPBImpl
      convertFromProtoFormat(ContainerIdProto containerId) {
    return new ContainerIdPBImpl(containerId);
  }

  private ContainerStateProto convertToProtoFormat(ContainerState state) {
    return ProtoUtils.convertToProtoFormat(state);
  }

  private ContainerState convertFromProtoFormat(
      ContainerStateProto containerState) {
    return ProtoUtils.convertFromProtoFormat(containerState);
  }

}
http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerStartDataPBImpl.java
----------------------------------------------------------------------
diff --git
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerStartDataPBImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerStartDataPBImpl.java new file mode 100644 index 0000000..e7d0762 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/records/impl/pb/ContainerStartDataPBImpl.java @@ -0,0 +1,258 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.records.impl.pb; + +import org.apache.ambari.metrics.records.ContainerStartData; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; + +import com.google.protobuf.TextFormat; + +public class ContainerStartDataPBImpl extends ContainerStartData { + + ContainerStartDataProto proto = ContainerStartDataProto.getDefaultInstance(); + ContainerStartDataProto.Builder builder = null; + boolean viaProto = false; + + private ContainerId containerId; + private Resource resource; + private NodeId nodeId; + private Priority priority; + + public ContainerStartDataPBImpl() { + builder = ContainerStartDataProto.newBuilder(); + } + + public ContainerStartDataPBImpl(ContainerStartDataProto proto) { + this.proto = proto; + viaProto = true; + } + + @Override + public ContainerId getContainerId() { + if (this.containerId != null) { + return this.containerId; + } + ContainerStartDataProtoOrBuilder p = viaProto ? 
proto : builder; + if (!p.hasContainerId()) { + return null; + } + this.containerId = convertFromProtoFormat(p.getContainerId()); + return this.containerId; + } + + @Override + public void setContainerId(ContainerId containerId) { + maybeInitBuilder(); + if (containerId == null) { + builder.clearContainerId(); + } + this.containerId = containerId; + } + + @Override + public Resource getAllocatedResource() { + if (this.resource != null) { + return this.resource; + } + ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasAllocatedResource()) { + return null; + } + this.resource = convertFromProtoFormat(p.getAllocatedResource()); + return this.resource; + } + + @Override + public void setAllocatedResource(Resource resource) { + maybeInitBuilder(); + if (resource == null) { + builder.clearAllocatedResource(); + } + this.resource = resource; + } + + @Override + public NodeId getAssignedNode() { + if (this.nodeId != null) { + return this.nodeId; + } + ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasAssignedNodeId()) { + return null; + } + this.nodeId = convertFromProtoFormat(p.getAssignedNodeId()); + return this.nodeId; + } + + @Override + public void setAssignedNode(NodeId nodeId) { + maybeInitBuilder(); + if (nodeId == null) { + builder.clearAssignedNodeId(); + } + this.nodeId = nodeId; + } + + @Override + public Priority getPriority() { + if (this.priority != null) { + return this.priority; + } + ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasPriority()) { + return null; + } + this.priority = convertFromProtoFormat(p.getPriority()); + return this.priority; + } + + @Override + public void setPriority(Priority priority) { + maybeInitBuilder(); + if (priority == null) { + builder.clearPriority(); + } + this.priority = priority; + } + + @Override + public long getStartTime() { + ContainerStartDataProtoOrBuilder p = viaProto ? 
proto : builder; + return p.getStartTime(); + } + + @Override + public void setStartTime(long startTime) { + maybeInitBuilder(); + builder.setStartTime(startTime); + } + + public ContainerStartDataProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } + + private void mergeLocalToBuilder() { + if (this.containerId != null + && !((ContainerIdPBImpl) this.containerId).getProto().equals( + builder.getContainerId())) { + builder.setContainerId(convertToProtoFormat(this.containerId)); + } + if (this.resource != null + && !((ResourcePBImpl) this.resource).getProto().equals( + builder.getAllocatedResource())) { + builder.setAllocatedResource(convertToProtoFormat(this.resource)); + } + if (this.nodeId != null + && !((NodeIdPBImpl) this.nodeId).getProto().equals( + builder.getAssignedNodeId())) { + builder.setAssignedNodeId(convertToProtoFormat(this.nodeId)); + } + if (this.priority != null + && !((PriorityPBImpl) this.priority).getProto().equals( + builder.getPriority())) { + builder.setPriority(convertToProtoFormat(this.priority)); + } + } + + private void mergeLocalToProto() { + if (viaProto) { + maybeInitBuilder(); + } + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = ContainerStartDataProto.newBuilder(proto); + } + viaProto = false; + } + + private ContainerIdProto convertToProtoFormat(ContainerId containerId) { + return ((ContainerIdPBImpl) containerId).getProto(); + } + + 
private ContainerIdPBImpl + convertFromProtoFormat(ContainerIdProto containerId) { + return new ContainerIdPBImpl(containerId); + } + + private ResourceProto convertToProtoFormat(Resource resource) { + return ((ResourcePBImpl) resource).getProto(); + } + + private ResourcePBImpl convertFromProtoFormat(ResourceProto resource) { + return new ResourcePBImpl(resource); + } + + private NodeIdProto convertToProtoFormat(NodeId nodeId) { + return ((NodeIdPBImpl) nodeId).getProto(); + } + + private NodeIdPBImpl convertFromProtoFormat(NodeIdProto nodeId) { + return new NodeIdPBImpl(nodeId); + } + + private PriorityProto convertToProtoFormat(Priority priority) { + return ((PriorityPBImpl) priority).getProto(); + } + + private PriorityPBImpl convertFromProtoFormat(PriorityProto priority) { + return new PriorityPBImpl(priority); + } + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/GenericObjectMapper.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/GenericObjectMapper.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/GenericObjectMapper.java new file mode 100644 index 0000000..b802a88 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/GenericObjectMapper.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.metrics.timeline; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.codehaus.jackson.map.ObjectMapper; +import org.codehaus.jackson.map.ObjectReader; +import org.codehaus.jackson.map.ObjectWriter; + +/** + * A utility class providing methods for serializing and deserializing + * objects. The {@link #write(Object)} and {@link #read(byte[])} methods are + * used by the {@link LeveldbTimelineStore} to store and retrieve arbitrary + * JSON, while the {@link #writeReverseOrderedLong} and {@link + * #readReverseOrderedLong} methods are used to sort entities in descending + * start time order. + */ [email protected] [email protected] +public class GenericObjectMapper { + private static final byte[] EMPTY_BYTES = new byte[0]; + + public static final ObjectReader OBJECT_READER; + public static final ObjectWriter OBJECT_WRITER; + + static { + ObjectMapper mapper = new ObjectMapper(); + OBJECT_READER = mapper.reader(Object.class); + OBJECT_WRITER = mapper.writer(); + } + + /** + * Serializes an Object into a byte array. Along with {@link #read(byte[])}, + * can be used to serialize an Object and deserialize it into an Object of + * the same type without needing to specify the Object's type, + * as long as it is one of the JSON-compatible objects understood by + * ObjectMapper. 
+ * + * @param o An Object + * @return A byte array representation of the Object + * @throws IOException if there is a write error + */ + public static byte[] write(Object o) throws IOException { + if (o == null) { + return EMPTY_BYTES; + } + return OBJECT_WRITER.writeValueAsBytes(o); + } + + /** + * Deserializes an Object from a byte array created with + * {@link #write(Object)}. + * + * @param b A byte array + * @return An Object + * @throws IOException if there is a read error + */ + public static Object read(byte[] b) throws IOException { + return read(b, 0); + } + + /** + * Deserializes an Object from a byte array at a specified offset, assuming + * the bytes were created with {@link #write(Object)}. + * + * @param b A byte array + * @param offset Offset into the array + * @return An Object + * @throws IOException if there is a read error + */ + public static Object read(byte[] b, int offset) throws IOException { + if (b == null || b.length == 0) { + return null; + } + return OBJECT_READER.readValue(b, offset, b.length - offset); + } + + /** + * Converts a long to a 8-byte array so that lexicographic ordering of the + * produced byte arrays sort the longs in descending order. + * + * @param l A long + * @return A byte array + */ + public static byte[] writeReverseOrderedLong(long l) { + byte[] b = new byte[8]; + return writeReverseOrderedLong(l, b, 0); + } + + public static byte[] writeReverseOrderedLong(long l, byte[] b, int offset) { + b[offset] = (byte)(0x7f ^ ((l >> 56) & 0xff)); + for (int i = offset+1; i < offset+7; i++) { + b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); + } + b[offset+7] = (byte)(0xff ^ (l & 0xff)); + return b; + } + + /** + * Reads 8 bytes from an array starting at the specified offset and + * converts them to a long. The bytes are assumed to have been created + * with {@link #writeReverseOrderedLong}. 
+ * + * @param b A byte array + * @param offset An offset into the byte array + * @return A long + */ + public static long readReverseOrderedLong(byte[] b, int offset) { + long l = b[offset] & 0xff; + for (int i = 1; i < 8; i++) { + l = l << 8; + l = l | (b[offset+i]&0xff); + } + return l ^ 0x7fffffffffffffffl; + } + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/NameValuePair.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/NameValuePair.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/NameValuePair.java new file mode 100644 index 0000000..d31a658 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/NameValuePair.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.metrics.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; + +/** + * A class holding a name and value pair, used for specifying filters in + * {@link TimelineReader}. + */ [email protected] [email protected] +public class NameValuePair { + String name; + Object value; + + public NameValuePair(String name, Object value) { + this.name = name; + this.value = value; + } + + /** + * Get the name. + * @return The name. + */ + public String getName() { + + return name; + } + + /** + * Get the value. + * @return The value. + */ + public Object getValue() { + return value; + } + + @Override + public String toString() { + return "{ name: " + name + ", value: " + value + " }"; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineReader.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineReader.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineReader.java new file mode 100644 index 0000000..7c6efdb --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineReader.java @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.metrics.timeline; + +import java.io.IOException; +import java.util.Collection; +import java.util.EnumSet; +import java.util.Set; +import java.util.SortedSet; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; + +/** + * This interface is for retrieving timeline information. + */ [email protected] [email protected] +public interface TimelineReader { + + /** + * Possible fields to retrieve for {@link #getEntities} and {@link #getEntity} + * . + */ + enum Field { + EVENTS, + RELATED_ENTITIES, + PRIMARY_FILTERS, + OTHER_INFO, + LAST_EVENT_ONLY + } + + /** + * Default limit for {@link #getEntities} and {@link #getEntityTimelines}. + */ + final long DEFAULT_LIMIT = 100; + + /** + * This method retrieves a list of entity information, {@link TimelineEntity}, + * sorted by the starting timestamp for the entity, descending. The starting + * timestamp of an entity is a timestamp specified by the client. If it is not + * explicitly specified, it will be chosen by the store to be the earliest + * timestamp of the events received in the first put for the entity. + * + * @param entityType + * The type of entities to return (required). + * @param limit + * A limit on the number of entities to return. If null, defaults to + * {@link #DEFAULT_LIMIT}. 
+ * @param windowStart + * The earliest start timestamp to retrieve (exclusive). If null, + * defaults to retrieving all entities until the limit is reached. + * @param windowEnd + * The latest start timestamp to retrieve (inclusive). If null, + * defaults to {@link Long#MAX_VALUE} + * @param fromId + * If fromId is not null, retrieve entities earlier than and + * including the specified ID. If no start time is found for the + * specified ID, an empty list of entities will be returned. The + * windowEnd parameter will take precedence if the start time of this + * entity falls later than windowEnd. + * @param fromTs + * If fromTs is not null, ignore entities that were inserted into the + * store after the given timestamp. The entity's insert timestamp + * used for this comparison is the store's system time when the first + * put for the entity was received (not the entity's start time). + * @param primaryFilter + * Retrieves only entities that have the specified primary filter. If + * null, retrieves all entities. This is an indexed retrieval, and no + * entities that do not match the filter are scanned. + * @param secondaryFilters + * Retrieves only entities that have exact matches for all the + * specified filters in their primary filters or other info. This is + * not an indexed retrieval, so all entities are scanned but only + * those matching the filters are returned. + * @param fieldsToRetrieve + * Specifies which fields of the entity object to retrieve (see + * {@link Field}). If the set of fields contains + * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the + * most recent event for each entity is retrieved. If null, retrieves + * all fields. + * @return An {@link TimelineEntities} object. 
+ * @throws IOException + */ + TimelineEntities getEntities(String entityType, + Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs, + NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters, + EnumSet<Field> fieldsToRetrieve) throws IOException; + + /** + * This method retrieves the entity information for a given entity. + * + * @param entityId + * The entity whose information will be retrieved. + * @param entityType + * The type of the entity. + * @param fieldsToRetrieve + * Specifies which fields of the entity object to retrieve (see + * {@link Field}). If the set of fields contains + * {@link Field#LAST_EVENT_ONLY} and not {@link Field#EVENTS}, the + * most recent event for each entity is retrieved. If null, retrieves + * all fields. + * @return An {@link TimelineEntity} object. + * @throws IOException + */ + TimelineEntity getEntity(String entityId, String entityType, EnumSet<Field> + fieldsToRetrieve) throws IOException; + + /** + * This method retrieves the events for a list of entities all of the same + * entity type. The events for each entity are sorted in order of their + * timestamps, descending. + * + * @param entityType + * The type of entities to retrieve events for. + * @param entityIds + * The entity IDs to retrieve events for. + * @param limit + * A limit on the number of events to return for each entity. If + * null, defaults to {@link #DEFAULT_LIMIT} events per entity. + * @param windowStart + * If not null, retrieves only events later than the given time + * (exclusive) + * @param windowEnd + * If not null, retrieves only events earlier than the given time + * (inclusive) + * @param eventTypes + * Restricts the events returned to the given types. If null, events + * of all types will be returned. + * @return An {@link TimelineEvents} object. 
+ * @throws IOException + */ + TimelineEvents getEntityTimelines(String entityType, + SortedSet<String> entityIds, Long limit, Long windowStart, + Long windowEnd, Set<String> eventTypes) throws IOException; +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineStore.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineStore.java new file mode 100644 index 0000000..3926ca1 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineStore.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.timeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.service.Service; + [email protected] [email protected] +public interface TimelineStore extends + Service, TimelineReader, TimelineWriter { +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineWriter.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineWriter.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineWriter.java new file mode 100644 index 0000000..4b78d31 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/timeline/TimelineWriter.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.timeline; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; + +/** + * This interface is for storing timeline information. + */ [email protected] [email protected] +public interface TimelineWriter { + + /** + * Stores entity information to the timeline store. Any errors occurring for + * individual put request objects will be reported in the response. + * + * @param data + * An {@link TimelineEntities} object. + * @return An {@link TimelinePutResponse} object. + * @throws IOException + */ + TimelinePutResponse put(TimelineEntities data) throws IOException; + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSController.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSController.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSController.java new file mode 100644 index 0000000..f87438c --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSController.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.metrics.webapp; + +import org.apache.hadoop.yarn.webapp.Controller; + +import com.google.inject.Inject; + +public class AMSController extends Controller { + + @Inject + AMSController(RequestContext ctx) { + super(ctx); + } + + @Override + public void index() { + setTitle("Ambari Metrics Service"); + } + +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSWebApp.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSWebApp.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSWebApp.java new file mode 100644 index 0000000..fa3b47c --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/AMSWebApp.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.metrics.webapp; + +import org.apache.ambari.metrics.core.timeline.TimelineMetricStore; +import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; +import org.apache.hadoop.yarn.webapp.WebApp; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.apache.hadoop.yarn.webapp.YarnWebParams; + +public class AMSWebApp extends WebApp implements YarnWebParams { + + private final TimelineMetricStore timelineMetricStore; + + public AMSWebApp(TimelineMetricStore timelineMetricStore) { + this.timelineMetricStore = timelineMetricStore; + } + + @Override + public void setup() { + bind(YarnJacksonJaxbJsonProvider.class); + bind(TimelineWebServices.class); + bind(GenericExceptionHandler.class); + bind(TimelineMetricStore.class).toInstance(timelineMetricStore); + route("/", AMSController.class); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/JAXBContextResolver.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/JAXBContextResolver.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/JAXBContextResolver.java new file mode 100644 index 0000000..404d3ea --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/JAXBContextResolver.java @@ -0,0 
+1,64 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ambari.metrics.webapp; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import javax.ws.rs.ext.ContextResolver; +import javax.ws.rs.ext.Provider; +import javax.xml.bind.JAXBContext; + +import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo; +import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo; +import org.apache.hadoop.yarn.server.webapp.dao.AppInfo; +import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo; +import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo; +import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo; + +import com.google.inject.Singleton; +import com.sun.jersey.api.json.JSONConfiguration; +import com.sun.jersey.api.json.JSONJAXBContext; + +@Singleton +@Provider +@SuppressWarnings("rawtypes") +public class JAXBContextResolver implements ContextResolver<JAXBContext> { + + private JAXBContext context; + private final Set<Class> types; + + // you have to specify all the dao classes here + private final Class[] cTypes = { AppInfo.class, AppsInfo.class, + AppAttemptInfo.class, AppAttemptsInfo.class, ContainerInfo.class, + ContainersInfo.class }; + + 
public JAXBContextResolver() throws Exception { + this.types = new HashSet<Class>(Arrays.asList(cTypes)); + this.context = + new JSONJAXBContext(JSONConfiguration.natural().rootUnwrapping(false) + .build(), cTypes); + } + + @Override + public JAXBContext getContext(Class<?> objectType) { + return (types.contains(objectType)) ? context : null; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/TimelineWebServices.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/TimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/TimelineWebServices.java new file mode 100644 index 0000000..a36b038 --- /dev/null +++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/ambari/metrics/webapp/TimelineWebServices.java @@ -0,0 +1,513 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ambari.metrics.webapp; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.Consumes; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.WebApplicationException; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.metrics2.sink.timeline.AggregationResult; +import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric; +import org.apache.hadoop.metrics2.sink.timeline.Precision; +import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException; +import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; +import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata; +import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; +import org.apache.hadoop.metrics2.sink.timeline.TopNConfig; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.ambari.metrics.core.timeline.TimelineMetricStore; +import org.apache.ambari.metrics.timeline.GenericObjectMapper; +import org.apache.ambari.metrics.timeline.NameValuePair; 
+import org.apache.ambari.metrics.timeline.TimelineReader.Field; +import org.apache.hadoop.yarn.util.timeline.TimelineUtils; +import org.apache.hadoop.yarn.webapp.BadRequestException; + +import com.google.inject.Inject; +import com.google.inject.Singleton; + +@Singleton +@Path("/ws/v1/timeline") +public class TimelineWebServices { + private static final Log LOG = LogFactory.getLog(TimelineWebServices.class); + + private TimelineMetricStore timelineMetricStore; + + @Inject + public TimelineWebServices(TimelineMetricStore timelineMetricStore) { + this.timelineMetricStore = timelineMetricStore; + } + + @XmlRootElement(name = "about") + @XmlAccessorType(XmlAccessType.NONE) + @Public + @Unstable + public static class AboutInfo { + + private String about; + + public AboutInfo() { + + } + + public AboutInfo(String about) { + this.about = about; + } + + @XmlElement(name = "About") + public String getAbout() { + return about; + } + + public void setAbout(String about) { + this.about = about; + } + + } + + /** + * Return the description of the timeline web services. + */ + @GET + @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public AboutInfo about( + @Context HttpServletRequest req, + @Context HttpServletResponse res) { + init(res); + return new AboutInfo("AMS API"); + } + + /** + * Store the given metrics into the timeline store, and return errors that + * happened during storing. + */ + @Path("/metrics") + @POST + @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelinePutResponse postMetrics( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + TimelineMetrics metrics) { + + init(res); + if (metrics == null) { + return new TimelinePutResponse(); + } + + try { + + // TODO: Check ACLs for MetricEntity using the TimelineACLManager. + // TODO: Save owner of the MetricEntity. 
+ + if (LOG.isDebugEnabled()) { + LOG.debug("Storing metrics: " + + TimelineUtils.dumpTimelineRecordtoJSON(metrics, true)); + } + + return timelineMetricStore.putMetrics(metrics); + + } catch (Exception e) { + LOG.error("Error saving metrics.", e); + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + /** + * Store the given metrics into the timeline store, and return errors that + * happened during storing. + */ + @Path("/metrics/aggregated") + @POST + @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelinePutResponse postAggregatedMetrics( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + AggregationResult metrics) { + + init(res); + if (metrics == null) { + return new TimelinePutResponse(); + } + + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Storing aggregated metrics: " + + TimelineUtils.dumpTimelineRecordtoJSON(metrics, true)); + } + + return timelineMetricStore.putHostAggregatedMetrics(metrics); + } catch (Exception e) { + LOG.error("Error saving metrics.", e); + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + @Path("/containermetrics") + @POST + @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */}) + public TimelinePutResponse postContainerMetrics( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + List<ContainerMetric> metrics) { + init(res); + if (metrics == null || metrics.isEmpty()) { + return new TimelinePutResponse(); + } + + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Storing container metrics: " + TimelineUtils + .dumpTimelineRecordtoJSON(metrics, true)); + } + + return timelineMetricStore.putContainerMetrics(metrics); + + } catch (Exception e) { + LOG.error("Error saving metrics.", e); + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + /** + * Query for a set of different metrics satisfying the filter criteria. 
+ * All query params are optional. The default limit will apply if none + * specified. + * + * @param metricNames Comma separated list of metrics to retrieve. + * @param appId Application Id for the requested metrics. + * @param instanceId Application instance id. + * @param hostname Hostname where the metrics originated. + * @param startTime Start time for the metric records retrieved. + * @param precision Precision [ seconds, minutes, hours ] + * @param limit limit on total number of {@link TimelineMetric} records + * retrieved. + * @return {@link @TimelineMetrics} + */ + @GET + @Path("/metrics") + @Produces({ MediaType.APPLICATION_JSON }) + public TimelineMetrics getTimelineMetrics( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @QueryParam("metricNames") String metricNames, + @QueryParam("appId") String appId, + @QueryParam("instanceId") String instanceId, + @QueryParam("hostname") String hostname, + @QueryParam("startTime") String startTime, + @QueryParam("endTime") String endTime, + @QueryParam("precision") String precision, + @QueryParam("limit") String limit, + @QueryParam("grouped") String grouped, + @QueryParam("topN") String topN, + @QueryParam("topNFunction") String topNFunction, + @QueryParam("isBottomN") String isBottomN, + @QueryParam("seriesAggregateFunction") String seriesAggregateFunction + ) { + init(res); + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Request for metrics => metricNames: " + metricNames + ", " + + "appId: " + appId + ", instanceId: " + instanceId + ", " + + "hostname: " + hostname + ", startTime: " + startTime + ", " + + "endTime: " + endTime + ", " + + "precision: " + precision + "seriesAggregateFunction: " + seriesAggregateFunction); + } + + return timelineMetricStore.getTimelineMetrics( + parseListStr(metricNames, ","), parseListStr(hostname, ","), appId, parseStr(instanceId), + parseLongStr(startTime), parseLongStr(endTime), + Precision.getPrecision(precision), parseIntStr(limit), + 
parseBoolean(grouped), parseTopNConfig(topN, topNFunction, isBottomN), + seriesAggregateFunction); + + } catch (NumberFormatException ne) { + throw new BadRequestException("startTime and limit should be numeric " + + "values"); + } catch (Precision.PrecisionFormatException pfe) { + throw new BadRequestException("precision should be seconds, minutes " + + "or hours"); + } catch (PrecisionLimitExceededException iae) { + throw new PrecisionLimitExceededException(iae.getMessage()); + } catch (IllegalArgumentException iae) { + throw new BadRequestException(iae.getMessage()); + } catch (SQLException | IOException e) { + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } + } + + @GET + @Path("/metrics/metadata") + @Produces({ MediaType.APPLICATION_JSON }) + public Map<String, List<TimelineMetricMetadata>> getTimelineMetricMetadata( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @QueryParam("appId") String appId, + @QueryParam("metricName") String metricPattern, + @QueryParam("includeAll") String includeBlacklistedMetrics + ) { + init(res); + + try { + return timelineMetricStore.getTimelineMetricMetadata( + parseStr(appId), + parseStr(metricPattern), + parseBoolean(includeBlacklistedMetrics)); + } catch (Exception e) { + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + @GET + @Path("/metrics/hosts") + @Produces({ MediaType.APPLICATION_JSON }) + public Map<String, Set<String>> getHostedAppsMetadata( + @Context HttpServletRequest req, + @Context HttpServletResponse res + ) { + init(res); + + try { + return timelineMetricStore.getHostAppsMetadata(); + } catch (Exception e) { + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + @GET + @Path("/metrics/instances") + @Produces({ MediaType.APPLICATION_JSON }) + public Map<String, Map<String, Set<String>>> getClusterHostsMetadata( + @Context HttpServletRequest req, + @Context HttpServletResponse 
res, + @QueryParam("appId") String appId, + @QueryParam("instanceId") String instanceId + ) { + init(res); + + try { + return timelineMetricStore.getInstanceHostsMetadata(instanceId, appId); + } catch (Exception e) { + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + @GET + @Path("/metrics/uuid") + @Produces({ MediaType.APPLICATION_JSON }) + public byte[] getUuid( + @Context HttpServletRequest req, + @Context HttpServletResponse res, + @QueryParam("metricName") String metricName, + @QueryParam("appId") String appId, + @QueryParam("instanceId") String instanceId, + @QueryParam("hostname") String hostname + ) { + init(res); + + try { + return timelineMetricStore.getUuid(metricName, appId, instanceId, hostname); + } catch (Exception e) { + throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } + } + + /** + * This is a discovery endpoint that advertises known live collector + * instances. Note: It will always answer with current instance as live. 
+ * This can be utilized as a liveliness pinger endpoint since the instance + * names are cached and thereby no synchronous calls result from this API + * + * @return List<String> hostnames</String> + */ + @GET + @Path("/metrics/livenodes") + @Produces({ MediaType.APPLICATION_JSON }) + public List<String> getLiveCollectorNodes( + @Context HttpServletRequest req, + @Context HttpServletResponse res + ) { + init(res); + + return timelineMetricStore.getLiveInstances(); + } + + private void init(HttpServletResponse response) { + response.setContentType(null); + } + + private static SortedSet<String> parseArrayStr(String str, String delimiter) { + if (str == null) { + return null; + } + SortedSet<String> strSet = new TreeSet<String>(); + String[] strs = str.split(delimiter); + for (String aStr : strs) { + strSet.add(aStr.trim()); + } + return strSet; + } + + private static NameValuePair parsePairStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter, 2); + try { + return new NameValuePair(strs[0].trim(), + GenericObjectMapper.OBJECT_READER.readValue(strs[1].trim())); + } catch (Exception e) { + // didn't work as an Object, keep it as a String + return new NameValuePair(strs[0].trim(), strs[1].trim()); + } + } + + private static Collection<NameValuePair> parsePairsStr( + String str, String aDelimiter, String pDelimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(aDelimiter); + Set<NameValuePair> pairs = new HashSet<NameValuePair>(); + for (String aStr : strs) { + pairs.add(parsePairStr(aStr, pDelimiter)); + } + return pairs; + } + + private static EnumSet<Field> parseFieldsStr(String str, String delimiter) { + if (str == null) { + return null; + } + String[] strs = str.split(delimiter); + List<Field> fieldList = new ArrayList<Field>(); + for (String s : strs) { + s = s.trim().toUpperCase(); + if (s.equals("EVENTS")) { + fieldList.add(Field.EVENTS); + } else if (s.equals("LASTEVENTONLY")) 
{ + fieldList.add(Field.LAST_EVENT_ONLY); + } else if (s.equals("RELATEDENTITIES")) { + fieldList.add(Field.RELATED_ENTITIES); + } else if (s.equals("PRIMARYFILTERS")) { + fieldList.add(Field.PRIMARY_FILTERS); + } else if (s.equals("OTHERINFO")) { + fieldList.add(Field.OTHER_INFO); + } else { + throw new IllegalArgumentException("Requested nonexistent field " + s); + } + } + if (fieldList.size() == 0) { + return null; + } + Field f1 = fieldList.remove(fieldList.size() - 1); + if (fieldList.size() == 0) { + return EnumSet.of(f1); + } else { + return EnumSet.of(f1, fieldList.toArray(new Field[fieldList.size()])); + } + } + + private static Long parseLongStr(String str) { + return str == null ? null : Long.parseLong(str.trim()); + } + + private static Integer parseIntStr(String str) { + return str == null ? null : Integer.parseInt(str.trim()); + } + + private static boolean parseBoolean(String booleanStr) { + return booleanStr == null || Boolean.parseBoolean(booleanStr); + } + + private static TopNConfig parseTopNConfig(String topN, String topNFunction, + String bottomN) { + if (topN == null || topN.isEmpty()) { + return null; + } + Integer topNValue = parseIntStr(topN); + + if (topNValue == 0) { + LOG.info("Invalid Input for TopN query. Ignoring TopN Request."); + return null; + } + + Boolean isBottomN = (bottomN != null && Boolean.parseBoolean(bottomN)); + return new TopNConfig(topNValue, topNFunction, isBottomN); + } + + /** + * Parses delimited string to list of strings. It skips strings that are + * effectively empty (i.e. only whitespace). 
+ * + */ + private static List<String> parseListStr(String str, String delimiter) { + if (str == null || str.trim().isEmpty()){ + return null; + } + + String[] split = str.trim().split(delimiter); + List<String> list = new ArrayList<String>(split.length); + for (String s : split) { + if (!s.trim().isEmpty()){ + list.add(s); + } + } + + return list; + } + + private static String parseStr(String str) { + String trimmedInstance = (str == null) ? null : str.trim(); + if (trimmedInstance != null) { + if (trimmedInstance.isEmpty() || trimmedInstance.equalsIgnoreCase("undefined")) { + trimmedInstance = null; + } + } + return trimmedInstance; + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java deleted file mode 100644 index 38d46ef..0000000 --- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java +++ /dev/null @@ -1,141 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.server.applicationhistoryservice; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import org.apache.hadoop.metrics2.source.JvmMetrics; -import org.apache.hadoop.service.CompositeService; -import org.apache.hadoop.util.ExitUtil; -import org.apache.hadoop.util.ShutdownHookManager; -import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; -import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricsService; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore; -import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AMSWebApp; -import org.apache.hadoop.yarn.webapp.WebApp; -import org.apache.hadoop.yarn.webapp.WebApps; - -/** - * Metrics collector web server - */ -public class AMSApplicationServer extends CompositeService { - - public static final int SHUTDOWN_HOOK_PRIORITY = 30; - private static final Log LOG = LogFactory.getLog(AMSApplicationServer.class); - - TimelineMetricStore timelineMetricStore; - private WebApp webApp; - private 
TimelineMetricConfiguration metricConfiguration; - - public AMSApplicationServer() { - super(AMSApplicationServer.class.getName()); - } - - @Override - protected void serviceInit(Configuration conf) throws Exception { - metricConfiguration = TimelineMetricConfiguration.getInstance(); - metricConfiguration.initialize(); - timelineMetricStore = createTimelineMetricStore(conf); - addIfService(timelineMetricStore); - super.serviceInit(conf); - } - - @Override - protected void serviceStart() throws Exception { - DefaultMetricsSystem.initialize("AmbariMetricsSystem"); - JvmMetrics.initSingleton("AmbariMetricsSystem", null); - - startWebApp(); - super.serviceStart(); - } - - @Override - protected void serviceStop() throws Exception { - if (webApp != null) { - webApp.stop(); - } - - DefaultMetricsSystem.shutdown(); - super.serviceStop(); - } - - static AMSApplicationServer launchAMSApplicationServer(String[] args) { - Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler()); - StringUtils.startupShutdownMessage(AMSApplicationServer.class, args, LOG); - AMSApplicationServer amsApplicationServer = null; - try { - amsApplicationServer = new AMSApplicationServer(); - ShutdownHookManager.get().addShutdownHook( - new CompositeServiceShutdownHook(amsApplicationServer), - SHUTDOWN_HOOK_PRIORITY); - YarnConfiguration conf = new YarnConfiguration(); - amsApplicationServer.init(conf); - amsApplicationServer.start(); - } catch (Throwable t) { - LOG.fatal("Error starting AMSApplicationServer", t); - ExitUtil.terminate(-1, "Error starting AMSApplicationServer"); - } - return amsApplicationServer; - } - - public static void main(String[] args) { - launchAMSApplicationServer(args); - } - - protected TimelineMetricStore createTimelineMetricStore(Configuration conf) { - LOG.info("Creating metrics store."); - return new HBaseTimelineMetricsService(metricConfiguration); - } - - protected void startWebApp() { - String bindAddress = null; - try { - bindAddress = 
metricConfiguration.getWebappAddress(); - } catch (Exception e) { - throw new ExceptionInInitializerError("Cannot find bind address"); - } - LOG.info("Instantiating metrics collector at " + bindAddress); - try { - Configuration conf = metricConfiguration.getMetricsConf(); - conf.set("hadoop.http.max.threads", String.valueOf(metricConfiguration - .getTimelineMetricsServiceHandlerThreadCount())); - HttpConfig.Policy policy = HttpConfig.Policy.valueOf( - conf.get(TimelineMetricConfiguration.TIMELINE_SERVICE_HTTP_POLICY, - HttpConfig.Policy.HTTP_ONLY.name())); - webApp = - WebApps - .$for("timeline", null, null, "ws") - .withHttpPolicy(conf, policy) - .at(bindAddress) - .start(new AMSWebApp(timelineMetricStore)); - } catch (Exception e) { - String msg = "AHSWebApp failed to start."; - LOG.error(msg, e); - throw new YarnRuntimeException(msg, e); - } - } - -} http://git-wip-us.apache.org/repos/asf/ambari/blob/42112e28/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java ---------------------------------------------------------------------- diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java deleted file mode 100644 index a58ebd2..0000000 --- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java +++ /dev/null @@ -1,155 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics - .loadsimulator; - -import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.AppID.MASTER_APPS; -import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.AppID.SLAVE_APPS; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.List; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.AppID; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.ApplicationInstance; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.HostMetricsGenerator; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data.MetricsGeneratorConfigurer; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net.MetricsSender; -import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net.RestMetricsSender; -import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.TimeStampProvider; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * - */ -public class LoadRunner { - private final static Logger LOG = LoggerFactory.getLogger(LoadRunner.class); - - private final ScheduledExecutorService timer; - private final ExecutorService workersPool; - private final Collection<Callable<String>> workers; - private final long startTime = new Date().getTime(); - private final int collectIntervalMillis; - private final int sendIntervalMillis; - - public LoadRunner(String hostName, - int threadCount, - String metricsHostName, - int minHostIndex, - int collectIntervalMillis, - int sendIntervalMillis, - boolean createMaster) { - this.collectIntervalMillis = collectIntervalMillis; - this.workersPool = Executors.newFixedThreadPool(threadCount); - this.timer = Executors.newScheduledThreadPool(1); - this.sendIntervalMillis = sendIntervalMillis; - - workers = prepareWorkers(hostName, threadCount, metricsHostName, createMaster, minHostIndex); - } - - private Collection<Callable<String>> prepareWorkers(String hostName, - int threadCount, - String metricsHost, - Boolean createMaster, int minHostIndex) { - Collection<Callable<String>> senderWorkers = - new ArrayList<Callable<String>>(threadCount); - - int startIndex = minHostIndex; - if (createMaster) { - String simHost = hostName + startIndex; - addMetricsWorkers(senderWorkers, simHost, metricsHost, MASTER_APPS); - startIndex++; - } - - for (int i = startIndex; i < threadCount + minHostIndex; i++) { - String simHost = hostName + i; - addMetricsWorkers(senderWorkers, simHost, metricsHost, SLAVE_APPS); - } - - return senderWorkers; - } - - private void addMetricsWorkers(Collection<Callable<String>> senderWorkers, - String specificHostName, - String metricsHostName, - AppID[] apps) { - for (AppID app : apps) { - HostMetricsGenerator metricsGenerator = - createApplicationMetrics(specificHostName, app); - 
MetricsSender sender = new RestMetricsSender(metricsHostName); - senderWorkers.add(new MetricsSenderWorker(sender, metricsGenerator)); - } - } - - private HostMetricsGenerator createApplicationMetrics(String simHost, AppID host) { - ApplicationInstance appInstance = new ApplicationInstance(simHost, host, ""); - TimeStampProvider timeStampProvider = new TimeStampProvider(startTime, - collectIntervalMillis, sendIntervalMillis); - - return MetricsGeneratorConfigurer - .createMetricsForHost(appInstance, timeStampProvider); - } - - public void start() { - timer.scheduleAtFixedRate(new Runnable() { - @Override - public void run() { - try { - runOnce(); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } - } - }, 0, sendIntervalMillis, TimeUnit.MILLISECONDS); - } - - public void runOnce() throws InterruptedException { - List<Future<String>> futures = workersPool.invokeAll(workers, - sendIntervalMillis / 2, - TimeUnit.MILLISECONDS); - int done = 0; - - // TODO: correctly count the failed tasks - for (Future<String> future : futures) { - done += future.isDone() ? 1 : 0; - } - - LOG.info("Finished successfully " + done + " tasks "); - } - - public void shutdown() { - timer.shutdownNow(); - workersPool.shutdownNow(); - } - - public static void main(String[] args) { - LoadRunner runner = - new LoadRunner("local", 0, "metrics", 0, 10000, 20000, false); - - runner.start(); - } - -}
