java访问hive的方式

保证已经通过 hive --service hiveserver 启动了 HiveServer 服务

1、jdbc方式

需要把 hive 的 lib 目录下的 jar 全部加入 classpath,另外还要包含 hadoop-0.20.2-core.jar

/**
 * Singleton-style helper that accesses Hive over JDBC.
 *
 * <p>Requires a running HiveServer ({@code hive --service hiveserver}) and the
 * Hive JDBC driver jars on the classpath.
 */
public class HiveClient {

    /** Lazily-initialized shared Hive connection. */
    private static Connection conn = null;

    private HiveClient() {
        // Utility class — no instances.
    }

    /**
     * Returns the shared Hive connection, creating it on first use.
     *
     * @return an open JDBC connection to Hive
     * @throws SQLException if the driver is missing or the connection fails
     */
    public static Connection GetHiveConn() throws SQLException {
        if (conn == null) {
            try {
                Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
            } catch (ClassNotFoundException e) {
                // Surface the missing-driver problem to the caller instead of
                // killing the whole JVM with System.exit(1).
                throw new SQLException("Hive JDBC driver not found on classpath", e);
            }
            // 5008 is the port HiveServer was started with; the default is 10000.
            conn = DriverManager.getConnection("jdbc:hive://test01:5008/default", "", "");
        }
        return conn;
    }

    /** Closes the shared connection (if open) and allows a later reconnect. */
    public static void closeHive() throws SQLException {
        if (conn != null) {
            conn.close();
            // Reset so GetHiveConn() does not hand back a closed connection.
            conn = null;
        }
    }

    /** Demo: run a one-row query and print the first column. */
    public static void main(String[] args) throws SQLException {
        Connection conn = HiveClient.GetHiveConn();
        // try-with-resources closes the ResultSet and Statement even on error.
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from user_device_base limit 1")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } finally {
            HiveClient.closeHive();
        }
    }

}


2、thrift方式

// Access Hive via the raw Thrift API: execute HQL, then page results with fetchN.
TSocket transport = new TSocket("test01", 5008);
transport.setTimeout(1000);
TBinaryProtocol protocol = new TBinaryProtocol(transport);
Client client = new ThriftHive.Client(protocol);
transport.open();
try {
    client.execute("select * from user_device_base limit 1"); // Omitted HQL
    List<String> rows;
    // fetchN returns up to 1000 rows per call; null signals end of results.
    while ((rows = client.fetchN(1000)) != null) {
        for (String row : rows) {
            System.out.println(row);
        }
    }
} finally {
    // Always release the socket, even if execute/fetchN throws.
    transport.close();
}


pom dependency

<apache.hadoop.version>1.0.1</apache.hadoop.version>
 
<apache.hive.version>0.9.0</apache.hive.version>
 
 
 
<dependency>
 
    <groupId>org.apache.hive</groupId>
 
    <artifactId>hive-serde</artifactId>
 
    <optional>true</optional>
 
    <version>${apache.hive.version}</version>
 
</dependency>
 
 
<dependency>
 
    <groupId>org.apache.hive</groupId>
 
    <artifactId>hive-common</artifactId>
 
    <version>${apache.hive.version}</version>
 
</dependency>
 
<dependency>
 
    <groupId>org.apache.hive</groupId>
 
    <artifactId>hive-exec</artifactId>
 
    <version>${apache.hive.version}</version>
 
</dependency>
 
<dependency>
 
    <groupId>org.apache.hive</groupId>
 
    <artifactId>hive-jdbc</artifactId>
 
    <version>${apache.hive.version}</version>
 
</dependency>
 
<dependency>
 
    <groupId>org.apache.hadoop</groupId>
 
    <artifactId>hadoop-core</artifactId>
 
    <version>${apache.hadoop.version}</version>
 
</dependency>
 
<dependency>
 
    <groupId>org.apache.hadoop</groupId>
 
    <artifactId>hadoop-test</artifactId>
 
    <version>${apache.hadoop.version}</version>
 
    <scope>provided</scope>
 
</dependency>
 
<dependency>
 
    <groupId>javax.jdo</groupId>
 
    <artifactId>jdo2-api</artifactId>
 
    <version>2.3-eb</version>
 
    <scope>test</scope>
 
</dependency>
 
<!-- Needed for Hive unit tests -->
 
<dependency>
 
    <groupId>org.apache.hive</groupId>
 
    <artifactId>hive-cli</artifactId>
 
    <version>${apache.hive.version}</version>
 
    <scope>test</scope>
 
</dependency>


猜你喜欢

转载自wangzejie.iteye.com/blog/2000338