如何在Scala程序中连接Hive

如何在Scala程序中连接Hive:我们需要在Spark之外,用Scala程序调用Hive表。下面的示例程序是否可行,或者需要做哪些修改?

我有一个需求:我们必须在Spark之外,用Scala程序访问Hive表

有没有一种方法,可以在Spark环境之外使用Scala编程访问Hive表?

此示例程序是否有效或需要进行任何更改

import java.io.IOException
import scala.util.Try
import org.apache.hadoop.hive.cli.CliSessionState
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.session.SessionState

/**
 * Executes HiveQL statements directly against Hive (outside Spark) via the
 * Hive Driver API.
 *
 * NOTE(review): Scala class names are conventionally UpperCamelCase
 * ("GetData"); the original name is kept for source compatibility.
 */
class getData {
  // Hive configuration; presumably resolved from hive-site.xml on the
  // classpath — TODO confirm deployment setup.
  val hiveConf = new HiveConf(classOf[getData])

  /**
   * Builds a Hive Driver and starts a CLI session for it.
   * A fresh Driver is created on every call (matching the original code).
   */
  private def getDriver: Driver = {
    val driver = new Driver(hiveConf)
    SessionState.start(new CliSessionState(hiveConf))
    driver
  }

  /**
   * Runs a single HiveQL statement.
   *
   * @param hql the statement to execute
   * @return the Hive response code (always 0 on success)
   * @throws IOException if the driver throws, or if Hive reports a
   *                     non-zero response code
   */
  def executeHQL(hql: String): Int = {
    // Fold the Try directly instead of converting to Either and matching.
    // Pass the original exception as the cause so its type and stack trace
    // are preserved (the previous code wrapped only the message in a bare
    // Exception, losing all diagnostic context).
    val response = Try(getDriver.run(hql)).fold(
      e => throw new IOException(s"Failed to execute hql [$hql]", e),
      identity
    )
    val responseCode = response.getResponseCode
    if (responseCode != 0) {
      val err: String = response.getErrorMessage
      throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err)
    }
    responseCode
  }
}

您可以使用Hive JDBC驱动从Scala程序连接Hive:

import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;

/**
 * Sample: connect to Hive from a Scala program through the Hive JDBC driver.
 *
 * Fixes over the original snippet, which would not compile:
 *  - `public class ... extends App` mixed Java and Scala syntax; this is now
 *    a Scala `object` with an explicit `main` (the App trait has
 *    initialization-order pitfalls and is best avoided for entry points);
 *  - `val sql` was defined twice in one scope and the `val res` was
 *    reassigned — each result set now gets its own name;
 *  - DDL statements (drop/create table) are run via `execute()`, since
 *    `executeQuery()` requires a statement that returns a ResultSet;
 *  - the connection is closed in a `finally` block so it is not leaked.
 */
object HiveJdbcClient {
  // HiveServer1 driver/URL as in the original snippet. For HiveServer2 use
  // "org.apache.hive.jdbc.HiveDriver" with a "jdbc:hive2://..." URL.
  private val DriverName = "org.apache.hadoop.hive.jdbc.HiveDriver"

  def main(args: Array[String]): Unit = {
    // Register the JDBC driver with DriverManager.
    Class.forName(DriverName)

    val con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "")
    try {
      val stmt = con.createStatement()
      val tableName = "testHiveDriverTable"

      // DDL returns no ResultSet: use execute(), not executeQuery().
      stmt.execute("drop table " + tableName)
      stmt.execute("create table " + tableName + " (key int, value string)")

      // select * query
      val rows = stmt.executeQuery("select * from " + tableName)
      while (rows.next()) {
        println(rows.getInt(1).toString + "\t" + rows.getString(2))
      }

      // regular hive query
      val count = stmt.executeQuery("select count(1) from " + tableName)
      while (count.next()) {
        println(count.getString(1))
      }
    } finally {
      con.close()
    }
  }
}