On a test machine that already has Hadoop, Hive, and ZooKeeper installed:
# Copy the jars under hbase235/lib/ into hive312/lib/
# If any jars already exist, choose not to overwrite them
[root@hadoop conf]# cp /opt/soft/hbase235/lib/* /opt/soft/hive312/lib/
# Switch to the hbase235/lib directory
[root@hadoop conf]# cd /opt/soft/hbase235/lib/
# Copy hive312/lib/guava-27.0-jre.jar into the current directory
# (this keeps the guava version consistent between Hive and HBase)
[root@hadoop lib]# cp /opt/soft/hive312/lib/guava-27.0-jre.jar ./
# Switch to the /opt/soft/hive312/conf/ directory
[root@hadoop conf]# cd /opt/soft/hive312/conf/
# Edit hive-site.xml
[root@hadoop conf]# vim ./hive-site.xml
# Add the following properties
<property>
    <name>hive.zookeeper.quorum</name>
    <value>192.168.95.150</value>
</property>
<property>
    <name>hbase.zookeeper.quorum</name>
    <value>192.168.95.150</value>
</property>
<property>
    <name>hive.aux.jars.path</name>
    <value>file:///opt/soft/hive312/lib/hive-hbase-handler-3.1.2.jar,file:///opt/soft/hive312/lib/zookeeper-3.4.6.jar,file:///opt/soft/hive312/lib/hbase-client-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5-tests.jar,file:///opt/soft/hive312/lib/hbase-server-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-common-2.3.5.jar,file:///opt/soft/hive312/lib/hbase-protocol-2.3.5.jar,file:///opt/soft/hive312/lib/htrace-core-3.2.0-incubating.jar</value>
</property>
# Start Hadoop
[root@hadoop ~]# start-all.sh
# Start ZooKeeper
[root@hadoop ~]# zkServer.sh start
# Start HBase
[root@hadoop ~]# start-hbase.sh
# Start the Hive metastore and HiveServer2
[root@hadoop ~]# nohup hive --service metastore &
[root@hadoop ~]# nohup hive --service hiveserver2 &
Add the Maven dependencies:
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>2.3.5</version>
</dependency>
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-server</artifactId>
    <version>2.3.5</version>
</dependency>
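The JUnit 4 snippets below assume imports and a surrounding test class roughly like the following sketch (the class name HBaseClientTest is illustrative, not from the original):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class HBaseClientTest {   // illustrative class name
    private Configuration config;
    private Connection conn;
    private Admin admin;

    // the @Before/@After/@Test methods shown below go inside this class
}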
Initialization: set the HBase connection info, then create the connection and the Admin client via the connection factory.
@Before
public void init() throws IOException {
    System.out.println("Running init()");
    config = HBaseConfiguration.create();
    config.set(HConstants.HBASE_DIR, "hdfs://192.168.95.150:9000/hbase");
    config.set(HConstants.ZOOKEEPER_QUORUM, "192.168.95.150");
    config.set(HConstants.CLIENT_PORT_STR, "2181");
    conn = ConnectionFactory.createConnection(config);
    admin = conn.getAdmin();
}
/*
 * Close the connections
 */
@After
public void close() throws IOException {
    System.out.println("Running close()");
    if (admin != null) admin.close();
    if (conn != null) conn.close();
}
/**
 * Create a namespace
 */
@Test
public void createNameSpace() throws IOException {
    NamespaceDescriptor bigdata = NamespaceDescriptor.create("bigdata").build();
    admin.createNamespace(bigdata);
}
/**
 * Create a table
 */
@Test
public void createTable() throws IOException {
    // Table descriptor
    TableName tableName = TableName.valueOf("bigdata:student");
    HTableDescriptor desc = new HTableDescriptor(tableName);
    // Column family descriptors
    HColumnDescriptor family1 = new HColumnDescriptor("info1");
    HColumnDescriptor family2 = new HColumnDescriptor("info2");
    desc.addFamily(family1);
    desc.addFamily(family2);
    admin.createTable(desc);
}
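HTableDescriptor and HColumnDescriptor are deprecated in HBase 2.x; for reference, a minimal sketch of the same table creation with the builder API (the method name createTableWithBuilder is illustrative):

/**
 * Create the same table with the non-deprecated 2.x builder API
 */
@Test
public void createTableWithBuilder() throws IOException {
    TableDescriptor desc = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("bigdata:student"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info1"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info2"))
            .build();
    admin.createTable(desc);
}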
/**
 * View the table descriptors in the namespace
 */
@Test
public void getAllNamespace() throws IOException {
    List<TableDescriptor> tableDesc = admin.listTableDescriptorsByNamespace("bigdata".getBytes());
    System.out.println(tableDesc.toString());
}
/**
 * Insert data
 */
@Test
public void insertData() throws IOException {
    Table table = conn.getTable(TableName.valueOf("bigdata:student"));
    Put put1 = new Put(Bytes.toBytes("student1"));
    put1.addColumn("info1".getBytes(), "name".getBytes(), "zs".getBytes());
    put1.addColumn("info2".getBytes(), "school".getBytes(), "njzb".getBytes());
    Put put2 = new Put(Bytes.toBytes("student2"));
    put2.addColumn("info1".getBytes(), "name".getBytes(), "zss".getBytes());
    put2.addColumn("info2".getBytes(), "school".getBytes(), "njzb".getBytes());
    Put put3 = new Put(Bytes.toBytes("student3"));
    put3.addColumn("info1".getBytes(), "name".getBytes(), "zsr".getBytes());
    put3.addColumn("info2".getBytes(), "school".getBytes(), "njzb".getBytes());
    List<Put> list = new ArrayList<>();
    list.add(put1);
    list.add(put2);
    list.add(put3);
    table.put(list);
}
/**
 * Query a single row with get
 */
@Test
public void queryData() throws IOException {
    Table table = conn.getTable(TableName.valueOf("bigdata:student"));
    Get get = new Get(Bytes.toBytes("student1"));
    Result result = table.get(get);
    byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
    System.out.println("Name: " + Bytes.toString(value));
    value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
    System.out.println("School: " + Bytes.toString(value));
}
/**
 * Scan the whole table
 */
@Test
public void scanData() throws IOException {
    Table table = conn.getTable(TableName.valueOf("bigdata:student"));
    Scan scan = new Scan();
    ResultScanner scanner = table.getScanner(scan);
    for (Result result : scanner) {
        byte[] value = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
        System.out.println("Name: " + Bytes.toString(value));
        value = result.getValue(Bytes.toBytes("info2"), Bytes.toBytes("school"));
        System.out.println("School: " + Bytes.toString(value));
        System.out.println(Bytes.toString(result.getRow()));
    }
}
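Scan also accepts row-range bounds; a minimal sketch (the start/stop rows are illustrative, and the stop row is exclusive):

/**
 * Scan the row range [student1, student3)
 */
@Test
public void scanRange() throws IOException {
    Table table = conn.getTable(TableName.valueOf("bigdata:student"));
    Scan scan = new Scan()
            .withStartRow(Bytes.toBytes("student1"))
            .withStopRow(Bytes.toBytes("student3")); // stop row is exclusive
    try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
            System.out.println(Bytes.toString(result.getRow()));
        }
    }
}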
/**
 * Delete the table
 */
@Test
public void deleteTable() throws IOException {
    // Disable first
    admin.disableTable(TableName.valueOf("bigdata:student"));
    // Then delete
    admin.deleteTable(TableName.valueOf("bigdata:student"));
}
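To remove a single row rather than the whole table, a sketch using Delete (row key student1 as inserted above):

/**
 * Delete one row by row key
 */
@Test
public void deleteRow() throws IOException {
    Table table = conn.getTable(TableName.valueOf("bigdata:student"));
    table.delete(new Delete(Bytes.toBytes("student1")));
}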
Connect to Hive with DataGrip
Note that the external table's columns must map to the columns in HBase: in hbase.columns.mapping below, :key maps to the HBase row key, and each family:qualifier entry maps the corresponding Hive column to that HBase column.
create external table student(
    id string,
    name string,
    school string
)
stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
with serdeproperties ("hbase.columns.mapping"=":key,info1:name,info2:school")
tblproperties ("hbase.table.name"="bigdata:student");
select * from student;
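DataGrip talks to HiveServer2 over JDBC; for reference, a minimal Java sketch of the same query (assumes the hive-jdbc dependency on the classpath and HiveServer2 on its default port 10000; the user root and class name HiveQueryDemo are assumptions):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveQueryDemo {   // illustrative class name
    public static void main(String[] args) throws Exception {
        // Default HiveServer2 JDBC endpoint; adjust host/port as needed
        String url = "jdbc:hive2://192.168.95.150:10000/default";
        try (Connection conn = DriverManager.getConnection(url, "root", "");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from student")) {
            while (rs.next()) {
                // Read columns by position to avoid qualified result names
                System.out.println(rs.getString(1) + "\t" + rs.getString(2) + "\t" + rs.getString(3));
            }
        }
    }
}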