Hadoop: How do I set up the configuration in the hive-site.xml file for a Hive metastore connection?


I want to connect to the Hive metastore using Java code. I do not know how to set the configuration settings in the hive-site.xml file, nor where to put the hive-site.xml file. Please help.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class HiveMetastoreJDBCTest {

    public static void main(String[] args) throws Exception {

        Connection conn = null;
        try {
            HiveConf conf = new HiveConf();
            conf.addResource(new Path("file:///path/to/hive-site.xml"));
            Class.forName(conf.getVar(ConfVars.METASTORE_CONNECTION_DRIVER));
            conn = DriverManager.getConnection(
                    conf.getVar(ConfVars.METASTORECONNECTURLKEY),
                    conf.getVar(ConfVars.METASTORE_CONNECTION_USER_NAME),
                    conf.getVar(ConfVars.METASTOREPWD));

            Statement st = conn.createStatement();
            ResultSet rs = st.executeQuery(
                "select t.tbl_name, s.location from tbls t " +
                "join sds s on t.sd_id = s.sd_id");
            while (rs.next()) {
                System.out.println(rs.getString(1) + " : " + rs.getString(2));
            }
        } finally {
            if (conn != null) {
                conn.close();
            }
        }
    }
}
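
As for where to put hive-site.xml: if the file is on the application classpath (for example in src/main/resources or a conf directory added to the classpath), new HiveConf() merges it in automatically and the explicit addResource call above is not needed. A minimal sketch, assuming the file is on the classpath:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class ClasspathHiveSiteTest {

    public static void main(String[] args) {
        // hive-site.xml found on the classpath is loaded automatically
        // when HiveConf is constructed
        HiveConf conf = new HiveConf();
        System.out.println(conf.getVar(ConfVars.METASTORECONNECTURLKEY));
    }
}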

Add the following lines to your hive-site.xml:

<property>
  <name>hive.metastore.local</name>
  <value>true</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://localhost:3306/hive</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hiveuser</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>hivepass</value>
</property>

In jdbc:mysql://localhost:3306/hive, 3306 is the default MySQL port and hive is the name of the MySQL database backing the Hive metastore. Change hiveuser to your MySQL Hive username and hivepass to your MySQL Hive password.

If you have not yet created a database for the Hive metastore in MySQL, do this step in a terminal:

mysql -u root -p

Enter your MySQL root password, then:

mysql> create database hive;

mysql> create user 'hiveuser'@'%' identified by 'hivepass';

mysql> grant all on *.* to 'hiveuser'@localhost identified by 'hivepass';

mysql> flush privileges;

Here, hiveuser and hivepass are, respectively, the username and password you chose for the Hive metastore.

Note: you need to have the MySQL JDBC connector jar in $HIVE_HOME/lib and in $HADOOP_HOME/lib.
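
A quick way to verify that the connector jar is visible to your Java program is to try loading the driver class. This is just a sketch, assuming the usual com.mysql.jdbc.Driver class name shipped in mysql-connector-java:

public class CheckMysqlDriver {

    public static void main(String[] args) {
        try {
            // driver class shipped in the MySQL connector jar
            Class.forName("com.mysql.jdbc.Driver");
            System.out.println("MySQL JDBC driver found on the classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("Driver not found - copy the MySQL connector jar "
                    + "into $HIVE_HOME/lib and $HADOOP_HOME/lib (and onto your classpath)");
        }
    }
}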


Regarding hive-site.xml, here is a sample from my test machine. It is used to set up the Hive metastore with a MySQL server installed on localhost:

<configuration>
<property>
 <name>javax.jdo.option.ConnectionURL</name>
 <value>jdbc:mysql://localhost/metastore?createDatabaseIfNotExist=true</value>
 <description>metadata is stored in a MySQL server</description>
</property>
<property>
 <name>javax.jdo.option.ConnectionDriverName</name>
 <value>com.mysql.jdbc.Driver</value>
 <description>MySQL JDBC driver class</description>
</property>
<property>
 <name>javax.jdo.option.ConnectionUserName</name>
 <value>hive</value>
 <description>user name for connecting to mysql server </description>
</property>
<property>
 <name>javax.jdo.option.ConnectionPassword</name>
 <value>123456</value>
 <description>password for connecting to mysql server </description>
</property>
</configuration>
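
To confirm that this hive-site.xml is actually being read, here is a small sketch (the file path below is only an example, point it at your own copy) that loads it into a HiveConf and prints the effective metastore connection settings:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class PrintMetastoreConf {

    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // example location - adjust to wherever your hive-site.xml lives
        conf.addResource(new Path("file:///etc/hive/conf/hive-site.xml"));
        System.out.println("URL    : " + conf.getVar(ConfVars.METASTORECONNECTURLKEY));
        System.out.println("Driver : " + conf.getVar(ConfVars.METASTORE_CONNECTION_DRIVER));
        System.out.println("User   : " + conf.getVar(ConfVars.METASTORE_CONNECTION_USER_NAME));
    }
}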

Thanks for your valuable reply. But I am trying to connect to the Hive metastore using Java code, not through MySQL. Please let me know if you have code for that.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class WriteToHive {
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    static Connection con;
    static Statement stmt;

    public WriteToHive() throws SQLException, ClassNotFoundException, Exception {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e){
            e.printStackTrace();
            throw new ClassNotFoundException("No JDBC Hive Driver found");
            //System.exit(1);
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception(e);
            //System.exit(1);
        }

        con = DriverManager.getConnection("jdbc:hive://localhost:10000/rajen","","");
        stmt = con.createStatement();
    }

    public static void main(String[] args) throws SQLException {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e){
            e.printStackTrace();
            System.exit(1);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
        con = DriverManager.getConnection("jdbc:hive://localhost:10000/rajen","","");
        stmt = con.createStatement();
        //Connection con = DriverManager.getConnection("jdbc:hive://","","");
        String tableName = "company_mas_hive_eclipse_trial";

        ResultSet res = stmt.executeQuery("use rajen");

        String sql = "DROP TABLE IF EXISTS " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        sql = "CREATE TABLE IF NOT EXISTS rajen.company_mas_hive_eclipse_trial (" +
              "Name string," + 
              "dateofincorporation string," + 
              "country string)" +
              "ROW FORMAT DELIMITED FIELDS TERMINATED BY \",\"";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        sql = "show tables '" + tableName + "'";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        if (res.next()){
            System.out.println(res.getString(1));
        }

        sql = "describe " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("=========================================");
        while (res.next()) {
          System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
        System.out.println("=========================================");

        // load data into table
        // NOTE: filepath has to be local to the hive server
        // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
        String filepath = "/home/seo/Refrence_Doc/sampledata/companymas"; //"/rajen/companymas";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);

        // load data into table
        // NOTE: filepath has to be local to the hive server
        // NOTE: /tmp/a.txt is a ctrl-A separated file with two fields per line
        filepath = "/rajen/companymas";
        sql = "load data inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running: " + sql);
        //res = stmt.executeQuery(sql);

        // select * query
        sql = "select * from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(String.valueOf(res.getString(1)) + "\t" + res.getString(2));
        }

        // regular hive query
        sql = "select count(*) from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    public void createTable(String def, String dbname) throws SQLException{
        @SuppressWarnings("unused")
        ResultSet res = stmt.executeQuery("use " + dbname);
        stmt.executeQuery(def);
    }

    public static void loadData(String filepath, String tableName) throws SQLException{
        stmt.executeQuery("load data local inpath '" + filepath + "' into table " + tableName);
    }
}
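
If the goal is to talk to the metastore service itself from Java (rather than to HiveServer through the JDBC driver as in the code above, or to the backing MySQL database), one option is the Thrift client that ships with Hive. A minimal sketch, assuming the metastore Thrift service is running and listening on thrift://localhost:9083 (adjust the URI to your setup):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class HiveMetastoreThriftTest {

    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // example URI - point this at your running metastore Thrift service
        conf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:9083");

        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            // list every table in every database known to the metastore
            for (String db : client.getAllDatabases()) {
                for (String table : client.getAllTables(db)) {
                    System.out.println(db + "." + table);
                }
            }
        } finally {
            client.close();
        }
    }
}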