
【Hive】JDBC Operations

Posted: 2017-01-19


The following JUnit test class connects to HiveServer2 through the JDBC driver and runs basic DDL/DML statements (create table, drop table, load data, select):

package com.java.hadoop.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.junit.Before;
import org.junit.Test;

public class TestHive {
    private Connection connection;
    private PreparedStatement ps;
    private ResultSet rs;

    // Open a connection to HiveServer2 before each test
    @Before
    public void getConnection() {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            connection = DriverManager.getConnection("jdbc:hive2://192.168.18.130:10000/", "root", "root");
            System.out.println(connection);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Close result set, statement and connection
    public void close() {
        try {
            if (rs != null) {
                rs.close();
            }
            if (ps != null) {
                ps.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Create a table
    @Test
    public void createTable() {
        String sql = "create table goods2(id int, name string) row format delimited fields terminated by '\t'";
        try {
            ps = connection.prepareStatement(sql);
            ps.execute();
            close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Drop a table
    @Test
    public void dropTable() {
        String sql = "drop table goods";
        try {
            ps = connection.prepareStatement(sql);
            ps.execute();
            close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Load data -- upload goods.txt to HDFS first (e.g. hdfs dfs -put goods.txt /)
    @Test
    public void insert() throws SQLException {
        String sql = "load data inpath '/goods.txt' into table goods";
        ps = connection.prepareStatement(sql);
        ps.execute();
        close();
    }

    // Query
    @Test
    public void find() throws SQLException {
        String sql = "select * from goods";
        ps = connection.prepareStatement(sql);
        rs = ps.executeQuery();
        while (rs.next()) {
            System.out.println(rs.getObject(1) + "---" + rs.getObject(2));
        }
        close();
    }

}
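The HiveServer2 JDBC driver also accepts bound parameters in a PreparedStatement, so a filtered query can use ? placeholders instead of string concatenation. Below is a minimal sketch of an extra test method that could be added to the TestHive class above; the findById name, the column list and the bound value are illustrative, not from the original post:

    // Hypothetical addition to TestHive: query goods by id with a bound parameter
    @Test
    public void findById() throws SQLException {
        String sql = "select id, name from goods where id = ?";
        ps = connection.prepareStatement(sql);
        ps.setInt(1, 1);                 // bind the placeholder to id = 1
        rs = ps.executeQuery();
        while (rs.next()) {
            System.out.println(rs.getInt(1) + "---" + rs.getString(2));
        }
        close();
    }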
Besides JDBC, Hive statements can also be executed from Java by launching the hive CLI with -e through a ProcessBuilder and collecting its standard output:

String sql = "show tables; select * from test_tb limit 10";
List<String> command = new ArrayList<String>();
command.add("hive");
command.add("-e");
command.add(sql);

List<String> results = new ArrayList<String>();
ProcessBuilder hiveProcessBuilder = new ProcessBuilder(command);
Process hiveProcess = hiveProcessBuilder.start();

// Read the query results line by line from the hive CLI's standard output
BufferedReader br = new BufferedReader(new InputStreamReader(
        hiveProcess.getInputStream()));
String data = null;
while ((data = br.readLine()) != null) {
    results.add(data);
}
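The snippet above never waits for the hive process to exit or checks whether it failed, and stderr is left unread. A small follow-up sketch, assuming it runs in the same method right after the loop above and that the checked IOException/InterruptedException are handled or declared by the caller:

// Drain stderr (the hive CLI writes its log and error messages there)
BufferedReader err = new BufferedReader(new InputStreamReader(
        hiveProcess.getErrorStream()));
String errLine;
while ((errLine = err.readLine()) != null) {
    System.err.println(errLine);
}
// Wait for the CLI to finish; a non-zero exit code means the statement failed
int exitCode = hiveProcess.waitFor();
if (exitCode != 0) {
    throw new RuntimeException("hive -e exited with code " + exitCode);
}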

 


Original article: http://www.cnblogs.com/flydkPocketMagic/p/6298403.html
