GitHub user ChinmaySKulkarni commented on a diff in the pull request:

    https://github.com/apache/phoenix/pull/402#discussion_r229565710
  
    --- Diff: phoenix-spark/src/it/java/org/apache/phoenix/spark/SparkUtil.java 
---
    @@ -0,0 +1,87 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.phoenix.spark;
    +
    +import com.google.common.base.Joiner;
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.phoenix.jdbc.PhoenixConnection;
    +import org.apache.phoenix.query.QueryServices;
    +import org.apache.phoenix.util.QueryBuilder;
    +import org.apache.spark.SparkConf;
    +import org.apache.spark.SparkContext;
    +import org.apache.spark.sql.Dataset;
    +import org.apache.spark.sql.Row;
    +import org.apache.spark.sql.SQLContext;
    +import org.apache.spark.sql.execution.SparkPlan;
    +import org.mortbay.log.Log;
    +import scala.Option;
    +import scala.collection.JavaConverters;
    +
    +import java.sql.Connection;
    +import java.sql.ResultSet;
    +import java.sql.SQLException;
    +import java.util.List;
    +
    +public class SparkUtil {
    +
    +    private static volatile SparkContext INSTANCE = null;
    +
    +    public static SparkContext getSparkContext() {
    +        if (INSTANCE == null) {
    +            synchronized (SparkUtil.class) {
    +                if (INSTANCE == null) {
    +                    SparkConf conf = new SparkConf()
    +                            .setAppName("Java Spark Tests")
    +                            .setMaster("local[2]") // 2 threads, some 
parallelism
    +                            .set("spark.ui.showConsoleProgress", 
"false");// Disable printing stage progress
    +                    INSTANCE = new SparkContext(conf);
    +                }
    +            }
    +        }
    +        return INSTANCE;
    +    }
    +
    +    public static ResultSet executeQuery(Connection conn, QueryBuilder 
queryBuilder, String url, Configuration config)
    +            throws SQLException {
    +        SQLContext sqlContext = new 
SQLContext(SparkUtil.getSparkContext());
    --- End diff --
    
    It looks as though `SQLContext` is 
[deprecated](https://spark.apache.org/docs/2.3.0/api/java/org/apache/spark/sql/SQLContext.html#SQLContext-org.apache.spark.SparkContext-).
 Quoting: _Deprecated. Use SparkSession.builder instead. Since 2.0.0._


---

Reply via email to the mailing list.