ayushtkn commented on code in PR #5451: URL: https://github.com/apache/hive/pull/5451#discussion_r1762562145
##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+      //In case of live query previously encountered in past loops
+      if(queryIdToSpanMap.containsKey(queryID)){
+        Span rootspan = queryIdToSpanMap.get(queryID);

Review Comment:
   ```
   Span rootspan = queryIdToSpanMap.get(queryID);
   if (rootspan != null) {
   ```



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+          while (task.getEndTime() == null) {
+            try {
+              Thread.sleep(500);

Review Comment:
   We can't have a sleep in the prod code like this, especially when the code is sequential. If the end time is null, just drop this task in this iteration and we will catch it in the next iteration.
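A minimal sketch of that suggestion, reusing the PR's own loop variables and helpers (illustrative, not the author's implementation): export only tasks whose end time is already set and leave the rest for the next exposeMetricsToOTEL() pass instead of blocking the exporter thread.

```java
for (QueryDisplay.TaskDisplay task : lQuery.getQueryDisplay().getTaskDisplays()) {
  // Skip tasks that have not finished yet or were already exported;
  // they are revisited on the next iteration instead of being waited on.
  if (task.getReturnValue() == null || task.getEndTime() == null
      || completedTasks.contains(task.getTaskId())) {
    continue;
  }
  completedTasks.add(task.getTaskId());
  Span currSpan = tracer.spanBuilder(queryID + " - " + task.getTaskId() + " - live")
      .setParent(Context.current().with(rootspan))
      .setAllAttributes(addTaskAttributes(task))
      .setStartTimestamp(task.getBeginTime(), TimeUnit.MILLISECONDS)
      .startSpan();
  currSpan.end(task.getEndTime(), TimeUnit.MILLISECONDS);
}
```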
##########
itests/hive-unit/pom.xml:
##########

+    <dependency>
+      <groupId>io.opentelemetry</groupId>
+      <artifactId>opentelemetry-exporter-otlp</artifactId>
+      <version>${otel.version}</version>
+    </dependency>

Review Comment:
   Why are we adding this in itests? We only have changes in the prod code.



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+      //For queries that were live till last loop but have ended before start of this loop
+      if(queryIdToSpanMap.containsKey(hQuery.getQueryDisplay().getQueryId())){
+        String hQueryId = hQuery.getQueryDisplay().getQueryId();
+        Span rootspan = queryIdToSpanMap.get(hQueryId);

Review Comment:
   Change to
   ```
   String hQueryId = hQuery.getQueryDisplay().getQueryId();
   Span rootspan = queryIdToSpanMap.get(hQueryId);
   if (rootspan != null) {
   ```



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+      while (lQuery.getQueryDisplay() == null) {
+        try {
+          Thread.sleep(500);

Review Comment:
   Why is there a sleep here?
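One possible direction, sketched with the PR's own loop variables (not the author's code): if the QueryDisplay is not populated yet, skip the query in this pass rather than sleeping on the exporter thread.

```java
for (QueryInfo lQuery : liveQueries) {
  if (lQuery.getQueryDisplay() == null) {
    continue; // not ready yet; it will be picked up on the next exporter pass
  }
  String queryID = lQuery.getQueryDisplay().getQueryId();
  // ... existing span handling for this query ...
}
```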
##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+            while (task.getEndTime() == null){
+              try {
+                Thread.sleep(500);
+              } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+              }

Review Comment:
   This is the same as below; refactor it into a method.
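The two duplicated blocks that create and end a child span for a finished task could collapse into one private helper. A rough sketch that also folds in the earlier "don't sleep, skip" suggestion; the method name and the computeIfAbsent bookkeeping are illustrative, not part of the PR:

```java
private void exportTaskSpan(Span rootSpan, String queryId, QueryDisplay.TaskDisplay task, String suffix) {
  // Remember that this task has been exported so later iterations skip it.
  queryIdToTasksMap.computeIfAbsent(queryId, k -> new ArrayList<>()).add(task.getTaskId());
  Context parentContext = Context.current().with(rootSpan);
  tracer.spanBuilder(queryId + " - " + task.getTaskId() + " - " + suffix)
      .setParent(parentContext)
      .setAllAttributes(addTaskAttributes(task))
      .setStartTimestamp(task.getBeginTime(), TimeUnit.MILLISECONDS)
      .startSpan()
      .end(task.getEndTime(), TimeUnit.MILLISECONDS);
}
```

Both the live-query and the historical-query loops could then call it for each task whose end time is already set, e.g. `exportTaskSpan(rootspan, queryID, task, "live")` or `exportTaskSpan(rootspan, hQueryId, task, "completed")`.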
##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+        if(rootspan != null){
+          queryIdToSpanMap.put(queryID,rootspan);
+          queryIdToTasksMap.put(queryID,completedTasks);
+        }

Review Comment:
   In which case will `rootspan` be `null` here?



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+        for (QueryDisplay.TaskDisplay task : hQuery.getQueryDisplay().getTaskDisplays()) {
+          if(queryIdToTasksMap.get(hQueryId) == null || !queryIdToTasksMap.get(hQueryId).contains(task.getTaskId())){

Review Comment:
   Can we use `rootspan` instead of `queryIdToTasksMap.get(hQueryId)`?



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+      //For queries that were already either before OTEL service started or in between OTEL loops
+      if (!historicalQueryId.contains(hQuery.getQueryDisplay().getQueryId())) {
+        historicalQueryId.add(hQuery.getQueryDisplay().getQueryId());

Review Comment:
   Your `historicalQueryId` set will grow indefinitely; you need to reset it to only the IDs returned by `List<QueryInfo> historicalQueries = operationManager.getHistoricalQueryInfos();`. That call returns, say, only 25 entries by default, but you would be keeping every ID forever, which will ultimately lead to an OOM on the HS2 side.
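A minimal sketch of the suggested pruning (illustrative; it assumes getQueryDisplay() is non-null for historical queries and needs an extra java.util.stream.Collectors import): rebuild the bookkeeping set from what the OperationManager still reports, so it can never outgrow the retained history.

```java
Set<String> retainedIds = historicalQueries.stream()
    .map(q -> q.getQueryDisplay().getQueryId())
    .collect(Collectors.toSet());
// Drop IDs of queries that HS2 has already evicted from its history.
historicalQueryId.retainAll(retainedIds);
```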
##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+            Span currSpan = tracer.spanBuilder(hQueryId+ " - " + task.getTaskId() + " - completed").setParent(parentContext).setAllAttributes(addTaskAttributes(task))
+                .setStartTimestamp(task.getBeginTime(), TimeUnit.MILLISECONDS).startSpan();
+            currSpan.end(task.getEndTime(), TimeUnit.MILLISECONDS);

Review Comment:
   I don't think we need to store `currSpan`; we can chain the calls:
   ```
   tracer.spanBuilder(hQueryId + " - " + task.getTaskId() + " - completed")
       .setParent(parentContext).setAllAttributes(addTaskAttributes(task))
       .setStartTimestamp(task.getBeginTime(), TimeUnit.MILLISECONDS).startSpan()
       .end(task.getEndTime(), TimeUnit.MILLISECONDS);
   ```



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+        //Update the rootSpan name & attributes before ending it
+        queryIdToSpanMap.get(hQueryId).updateName(hQueryId + " - completed");
+        queryIdToSpanMap.get(hQueryId).setAllAttributes(addQueryAttributes(hQuery));
+        queryIdToSpanMap.get(hQueryId).end();

Review Comment:
   Does this work instead?
   ```
   //Update the rootSpan name & attributes before ending it
   rootspan.updateName(hQueryId + " - completed").setAllAttributes(addQueryAttributes(hQuery)).end();
   ```



##########
service/src/java/org/apache/hive/service/servlet/OTELExporter.java:
##########

+      //For queries that were already either before OTEL service started or in between OTEL loops
+      if (!historicalQueryId.contains(hQuery.getQueryDisplay().getQueryId())) {
+        historicalQueryId.add(hQuery.getQueryDisplay().getQueryId());

Review Comment:
   It is a Set, so you don't need to do `contains` and then `add`; you can add directly and check the return value: `true` means the element wasn't there, `false` means it was already there.
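A small sketch of that Set-based variant (names taken from the PR, so purely illustrative):

```java
String hQueryId = hQuery.getQueryDisplay().getQueryId();
// Set#add returns false when the ID is already present, so no separate contains() check is needed.
if (historicalQueryId.add(hQueryId)) {
  // first time this finished query is seen: build its root and task spans here
}
```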
-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org