Hi Xiaoxiang,
Sirs & Madams,

I am using the following code to query the cube via the REST API, but I
cannot use the result as a DataFrame. Could you suggest a way to do that?
It is very important for our project.

Thanks and best regards,

===================================

import org.apache.spark.sql.{DataFrame, SparkSession}
import scalaj.http.Http

object APICaller {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("APICaller")
      .master("local[*]")
      .getOrCreate()

    import spark.implicits._

    val username = "namdd"
    val password = "eer123"
    val urlString = "http://localhost:7070/kylin/api/query"
    val project = "learn_kylin"
    val query = "select count(*) from HIVE_DWH_STANDARD.factuserEvent"

    val response: String = callAPI(urlString, username, password, project, query)

    // Read the raw JSON response; this yields a single nested row, not a table
    val df = spark.read.json(Seq(response).toDS())

    // Show DataFrame
    df.show()

    // Stop Spark session
    spark.stop()
  }

  def callAPI(url: String, username: String, password: String, project: String, query: String): String = {

    // scalaj-http's .auth sends the Basic Authorization header for us
    val connection = Http(url)
      .postData(s"""{"project": "$project", "sql": "$query"}""")
      .header("Content-Type", "application/json")
      .header("Accept", "application/json")
      .auth(username, password)
      .asString

    if (connection.isError)
      throw new RuntimeException(s"Error calling API: ${connection.body}")

    connection.body
  }
}
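
For reference, below is a minimal sketch of one way the response could be
flattened into a tabular DataFrame. It assumes the standard Kylin query
response shape, i.e. a JSON object with "columnMetas" (one entry per
column, each carrying a "label") and "results" (one string array per row);
the ResponseFlattener object and responseToDataFrame helper are
illustrative names, not part of any Kylin or Spark API.

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

object ResponseFlattener {
  // Sketch: turn the Kylin /kylin/api/query JSON response into one row per
  // result. Assumes "columnMetas" is an array of structs with a "label"
  // field and "results" is an array of string arrays, one per row.
  def responseToDataFrame(spark: SparkSession, response: String): DataFrame = {
    import spark.implicits._

    // Single-row DataFrame holding the whole nested response
    val raw = spark.read.json(Seq(response).toDS())

    // Column labels, in declared order
    val labels: Array[String] = raw
      .select(explode(col("columnMetas")).as("meta"))
      .select(col("meta.label"))
      .as[String]
      .collect()

    // One output row per entry of "results", one column per label
    raw
      .select(explode(col("results")).as("row"))
      .select(labels.zipWithIndex.map { case (label, i) => col("row")(i).as(label) }: _*)
  }
}

With that helper, ResponseFlattener.responseToDataFrame(spark, response).show()
should print one row per query result instead of a single nested row.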
