Posted by a user · Posted: 2022-04-07 16:47
3 answers in total
懂视网 · Time: 2022-04-07 21:08
Then we work with Hive just as with an ordinary database: first load the driver, then open a connection, create a Statement, execute the query, and finally read back the result set. The code follows (be careful to get the SQL statements exactly right; the places that need attention are marked in the comments below):
package playHive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveJdbcClient {

    private final static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private final static String localFilePath = "/home/hadoop/test/hive/test.txt";
    private final static String hdfsFilePath = "hdfs://192.168.0.1:9000/user/hadoop/";
    private final static String tableName = "testHiveDriverTable";
    private final static String partitionName = "testHiveDriverPartition";
    private final static String bucketName = "testHiveDriverBucket";

    private static String sql = "";
    private static Connection connection;
    private static Statement statement;
    private static ResultSet resultSet;

    static {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            System.out.println(e);
            System.exit(1);
        }
        try {
            // The hiveserver service must be started first: hive --service hiveserver
            connection = DriverManager.getConnection(
                    "jdbc:hive://192.168.0.1:50000/default", "hive", "hadoop");
            statement = connection.createStatement();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void createTable() throws SQLException {
        // "if exists" keeps the first run from failing when the table is absent;
        // DDL/DML statements go through execute(), not executeQuery()
        sql = "drop table if exists " + tableName;
        System.out.println("delete table****");
        statement.execute(sql);
        sql = "create table " + tableName + " (key int, value string)"
                + " row format delimited fields terminated by ' '";
        System.out.println("create table:" + tableName);
        statement.execute(sql);
        showTable();
        describeTable();
    }

    public static void showTable() throws SQLException {
        // Hive's SHOW TABLES takes a quoted pattern, not a bare identifier
        sql = "show tables '" + tableName + "'";
        System.out.println("show table:" + tableName);
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1));
        }
    }

    public static void describeTable() throws SQLException {
        sql = "describe " + tableName;
        System.out.println("describe table:" + tableName);
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1) + " " + resultSet.getString(2));
        }
    }

    public static void loadDataToTable(boolean isLocal) throws SQLException {
        sql = isLocal
                ? "load data local inpath '" + localFilePath + "' overwrite into table " + tableName
                : "load data inpath '" + hdfsFilePath + "' overwrite into table " + tableName;
        System.out.println("load data into table:" + tableName);
        statement.execute(sql);
    }

    public static void queryTable() throws SQLException {
        sql = "select * from " + tableName;
        System.out.println("execute query: select * from " + tableName);
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1) + " " + resultSet.getString(2));
        }
    }

    public static void regularTableQuery() throws SQLException {
        // sql = "select count(1) from " + tableName;
        sql = "select key, max(n) from (select key, count(value) as n from "
                + tableName + " group by key) subq group by key";
        System.out.println("execute query:");
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1));
        }
    }

    public static void createPartition() throws SQLException {
        sql = "drop table if exists " + partitionName;
        System.out.println("delete partition");
        statement.execute(sql);
        sql = "create table " + partitionName + " (key int) partitioned by (value string) "
                + "row format delimited fields terminated by ' '";
        System.out.println("create partition:" + partitionName);
        statement.execute(sql);
    }

    public static void insertDataToPartition() throws SQLException {
        // This must be "select key from <table>": key cannot be replaced with value,
        // otherwise the insert does not work and the inserted values end up NULL
        sql = "insert overwrite table " + partitionName
                + " partition (value='qinqin') select key from " + tableName;
        statement.execute(sql);
        System.out.println("insert data to " + partitionName + " success");
    }

    public static void selectFromPartition() throws SQLException {
        sql = "select * from " + partitionName + " where value='qinqin'";
        System.out.println("query in partition: select * in " + partitionName);
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1));
        }
    }

    public static void createBucket() throws SQLException {
        sql = "drop table if exists " + bucketName;
        System.out.println("delete bucket");
        statement.execute(sql);
        sql = "create table " + bucketName + " (key int, value string) clustered by (key) into 3 buckets "
                + "row format delimited fields terminated by ' '";
        System.out.println("create bucket:" + bucketName);
        statement.execute(sql);
    }

    public static void insertDataToBucket() throws SQLException {
        sql = "insert overwrite table " + bucketName + " select key, value from " + tableName;
        System.out.println("insert data into bucket:" + bucketName);
        statement.execute(sql);
    }

    public static void selectFromBucket() throws SQLException {
        sql = "select * from " + bucketName + " tablesample(bucket 1 out of 3 on key)";
        System.out.println("select from bucket:" + bucketName);
        resultSet = statement.executeQuery(sql);
        while (resultSet.next()) {
            System.out.println(resultSet.getString(1) + " " + resultSet.getString(2));
        }
    }

    public static void closeConnection() {
        try {
            connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    public static void tableOperation() throws SQLException {
        HiveJdbcClient.createTable();
        HiveJdbcClient.loadDataToTable(true);
        HiveJdbcClient.queryTable();
        HiveJdbcClient.regularTableQuery();
    }

    public static void partitionOperation() throws SQLException {
        HiveJdbcClient.createPartition();
        HiveJdbcClient.insertDataToPartition();
        HiveJdbcClient.selectFromPartition();
    }

    public static void bucketOperation() throws SQLException {
        HiveJdbcClient.createBucket();
        HiveJdbcClient.insertDataToBucket();
        HiveJdbcClient.selectFromBucket();
    }

    public static void main(String[] args) {
        try {
            System.out.println("table Operation***********************************************");
            // HiveJdbcClient.tableOperation();
            System.out.println("partition Operation***********************************************");
            HiveJdbcClient.partitionOperation();
            System.out.println("bucket Operation***********************************************");
            // HiveJdbcClient.bucketOperation();
            HiveJdbcClient.closeConnection();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
Creating Hive tables, partitions, and buckets via JDBC
Tags: hadoop hive jdbc database sql
Helpful user · Time: 2022-04-07 18:16
Yes, you can; just specify whatever type your use case needs. In most cases, though, string is used.
Helpful user · Time: 2022-04-07 19:34
My partition columns are of type int, partitioned by year, month, and day, and it works.
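To illustrate what the two replies above describe, here is a minimal sketch of declaring int partition columns (year/month/day) through the same old HiveServer1 JDBC driver and connection settings used in the answer's code. The table name logsByDate and the sample partition values are hypothetical, chosen only for this example; testHiveDriverTable is the source table from the code above.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class IntPartitionSketch {
    public static void main(String[] args) throws Exception {
        // Same driver and connection parameters as the answer above (assumption:
        // the environment from that post is available)
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(
                "jdbc:hive://192.168.0.1:50000/default", "hive", "hadoop");
        Statement stmt = conn.createStatement();

        // Partition columns declared as int rather than the more common string;
        // "logsByDate" is a hypothetical table name for this sketch
        stmt.execute("create table logsByDate (key int, value string) "
                + "partitioned by (year int, month int, day int) "
                + "row format delimited fields terminated by ' '");

        // Each distinct (year, month, day) triple becomes its own partition
        stmt.execute("insert overwrite table logsByDate "
                + "partition (year=2022, month=4, day=7) "
                + "select key, value from testHiveDriverTable");

        conn.close();
    }
}

Whether the partition columns are string or int only affects how Hive parses and compares the partition values (for example, during partition pruning); on HDFS the partitions are stored as directories like year=2022/month=4/day=7 either way.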