package com.sheng.hive;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.precedenceAmpersandExpression_return;
/**
* Hive JDBC操作
*/
/**
 * Demonstrates basic Hive operations over JDBC against HiveServer2
 * (ZooKeeper service discovery): database/table DDL, metadata queries,
 * data loading and a MapReduce-backed count query.
 *
 * NOTE(review): HiveQL does not support bind parameters for identifiers
 * (database/table names), so the DDL below is assembled by string
 * concatenation. Only pass trusted, hard-coded identifiers to these methods.
 */
public class HiveJDBC {
    /** Fully-qualified class name of the HiveServer2 JDBC driver. */
    public static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";
    /** ZooKeeper service-discovery URL; the commented URL below is a direct HiveServer2 connection. */
    public static final String URL = "jdbc:hive2://HoodpNode3:2181,HoodpNode2:2181,HoodpNode4:2181/default;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2_zk";
    //public static final String URL ="jdbc:hive2://hpNode1:10000/default";
    public static final String USER_NAME = "zx";
    public static final String PASSWORD = "123456";

    // Shared JDBC handles: opened once in init(), released in destory().
    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    public static void main(String[] args) throws Exception {
        init();
        try {
            //createDatabase("myhive");
            //showDatabases();
            //dropDatabase("myhive");
            //showTables("mystudent");
            //查询表结构
            //descTable("mystudent","student");
            //dropTable("mystudent","part_test_2 ");
            //createTable("mystudent","part_test_2 ");
            countData("mystudent", "student");
        } finally {
            // Always release JDBC resources, even when a query above fails.
            destory();
        }
    }

    /**
     * Loads the Hive JDBC driver and opens the shared connection and statement.
     *
     * Bug fix: the original caught {@link SQLException} and continued, leaving
     * {@code stmt} null and causing a NullPointerException in every later call.
     * Connection failures now propagate to the caller.
     *
     * @throws ClassNotFoundException if the driver class is not on the classpath
     * @throws SQLException           if the connection cannot be established
     */
    private static void init() throws ClassNotFoundException, SQLException {
        Class.forName(DRIVER_NAME);
        conn = DriverManager.getConnection(URL, USER_NAME, PASSWORD);
        stmt = conn.createStatement();
    }

    /**
     * Switches the session to the given database ("use &lt;db&gt;").
     * Extracted helper: this boilerplate was duplicated in five methods.
     *
     * @param databaseName database to switch to
     * @throws SQLException if the statement fails
     */
    private static void useDatabase(String databaseName) throws SQLException {
        String usesql = " use " + databaseName;
        System.out.println("Running: " + usesql);
        stmt.execute(usesql);
    }

    /**
     * Creates a database.
     *
     * @param databaseName name of the database to create
     * @throws Exception if the DDL fails
     */
    public static void createDatabase(String databaseName) throws Exception {
        String sql = "create database " + databaseName;
        System.out.println("Running: " + sql);
        stmt.execute(sql);
        System.out.println("创建成功:" + databaseName);
    }

    /**
     * Prints the name of every database visible to the session.
     *
     * @throws Exception if the query fails
     */
    public static void showDatabases() throws Exception {
        String sql = "show databases";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /**
     * Drops a database if it exists.
     *
     * @param databaseName name of the database to drop
     * @throws Exception if the DDL fails
     */
    public static void dropDatabase(String databaseName) throws Exception {
        String sql = "drop database if exists " + databaseName;
        System.out.println(sql);
        System.out.println("Running: " + sql);
        stmt.execute(sql);
        System.out.println("删除数据库成功" + databaseName);
    }

    /**
     * Creates an employee table with a fixed schema in the given database.
     *
     * @param databaseName   database to create the table in
     * @param createTableSql name of the table to create (despite the parameter
     *                       name, this is only the table identifier)
     * @throws Exception if the DDL fails
     */
    public static void createTable(String databaseName, String createTableSql) throws Exception {
        useDatabase(databaseName);
        String sql = "create table " + createTableSql + "(\n" +
                "empno int,\n" +
                "ename string,\n" +
                "job string,\n" +
                "mgr int,\n" +
                "hiredate string,\n" +
                "sal double,\n" +
                "comm double,\n" +
                "deptno int\n" +
                ")\n" +
                "row format delimited fields terminated by '\\t'";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
        System.out.println("创建" + createTableSql + "成功");
    }

    /**
     * Prints every table in the given database.
     *
     * @param databaseName database whose tables are listed
     * @throws Exception if the query fails
     */
    public static void showTables(String databaseName) throws Exception {
        useDatabase(databaseName);
        String sql = " show tables";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    /**
     * Prints the column name and type of every column of a table.
     *
     * @param databaseName database containing the table
     * @param tableName    table to describe
     * @throws Exception if the query fails
     */
    public static void descTable(String databaseName, String tableName) throws Exception {
        useDatabase(databaseName);
        String sql = "desc " + tableName;
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }
    }

    /**
     * Loads a local file into the {@code emp} table, overwriting existing data.
     * The file path is hard-coded and must exist on the HiveServer2 host.
     *
     * @throws Exception if the load fails
     */
    public static void loadData() throws Exception {
        String filePath = "/home/hadoop/data/emp.txt";
        String sql = "load data local inpath '" + filePath + "' overwrite into table emp";
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    /**
     * Runs an arbitrary SELECT and returns the first column of every row.
     *
     * Bug fix: the original was an unimplemented stub returning {@code null};
     * this now executes the query and never returns null.
     *
     * @param selectSql a complete SELECT statement
     * @return first-column values, one entry per row (empty list for no rows)
     * @throws IllegalStateException wrapping the SQLException if the query
     *                               fails (keeps the original no-throws signature)
     */
    public static List<String> selectData(String selectSql) {
        List<String> result = new ArrayList<>();
        System.out.println("Running: " + selectSql);
        try {
            rs = stmt.executeQuery(selectSql);
            while (rs.next()) {
                result.add(rs.getString(1));
            }
        } catch (SQLException e) {
            // Preserve the cause instead of swallowing or returning null.
            throw new IllegalStateException("query failed: " + selectSql, e);
        }
        return result;
    }

    /**
     * Drops a table if it exists.
     *
     * @param databaseName database containing the table
     * @param tableName    table to drop
     * @throws Exception if the DDL fails
     */
    public static void dropTable(String databaseName, String tableName) throws Exception {
        useDatabase(databaseName);
        String sql = "drop table if exists " + tableName;
        System.out.println("Running: " + sql);
        stmt.execute(sql);
        System.out.println("删除" + tableName + "成功");
    }

    /**
     * Counts the rows of a table (runs a MapReduce job on the cluster)
     * and prints the total.
     *
     * @param databasesName database containing the table
     * @param tableName     table to count
     * @throws Exception if the query fails
     */
    public static void countData(String databasesName, String tableName) throws Exception {
        useDatabase(databasesName);
        String sql = "select count(1) from " + tableName + "";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println("查询总条数为:" + rs.getInt(1));
        }
    }

    /**
     * Releases the shared JDBC resources in reverse order of acquisition.
     * Close failures are reported but not rethrown, so cleanup stays best-effort.
     */
    private static void destory() {
        try {
            if (rs != null) {
                rs.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
            System.out.println(e);
        }
    }
}
// NOTE(review): the lines below are web-scrape residue from the blog post this
// file was copied from (blog.csdn.net/weixin_43599377/article/details/103812575)
// — page headings, not Java code. They are preserved here as a comment so the
// file compiles:
//   Hive API操作(亲测有效) / 猜你喜欢 / 今日推荐 / 周排行