Operating HBase from Python
Jupyter development environment
1. Open Jupyter from the panel on the right and create a Python 3 notebook.
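happybase talks to HBase through the HBase Thrift gateway, so two prerequisites apply before the example below will connect (assuming they are not already prepared in this environment): the library must be installed, for example with pip install happybase, and the HBase Thrift server must be running on the target host (started with hbase thrift start; it listens on port 9090 by default, which is also the port happybase uses when none is given).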
Example of operating HBase from Python
import happybase

def connectHBase():
    # Open a connection to HBase via the Thrift server on host 'app-11'
    connection = happybase.Connection('app-11')
    # List all tables in HBase
    print(connection.tables())
    # Close the connection
    connection.close()
def createTable():
    connection = happybase.Connection('app-11')
    # Create the 'user' table with a single column family 'cf1' (default options)
    connection.create_table('user', {'cf1': dict()})
    print(connection.tables())
    connection.close()
def scanQuery():
    # Open a connection to HBase
    connection = happybase.Connection('app-11')
    # Get the Table object for the 'user' table from the connection
    table = connection.table('user')
    filter = "ColumnPrefixFilter('username')"
    # row_start sets the starting rowkey to narrow the scan range
    # filter applies an HBase filter string to the scan
    for key, value in table.scan(row_start='rowkey_10', filter=filter):
        print(key, value)
    # Close the connection
    connection.close()
def getQuery():
    connection = happybase.Connection('app-11')
    # Get the Table object for the 'user' table from the connection
    table = connection.table('user')
    # Fetch a single row, restricted to one column
    result = table.row('rowkey_22', columns=['base_info:username'])
    print(result)
    # Fetch several rows in one call
    results = table.rows(['rowkey_22', 'rowkey_16'], columns=['base_info:username'])
    print(results)
    # Close the connection
    connection.close()
def insertData():
    connection = happybase.Connection('app-11')
    # Get the Table object for the 'user' table from the connection
    table = connection.table('user')
    # Write one cell: row 'rk_01', column 'cf1:address'
    table.put('rk_01', {'cf1:address': 'beijing'})
    # Scan the table to verify the write
    for key, value in table.scan():
        print(key, value)
    # Close the connection
    connection.close()
def deleteData():
    connection = happybase.Connection('app-11')
    # Get the Table object for the 'user' table from the connection
    table = connection.table('user')
    # Delete the 'cf1:username' column from row 'rk_01'
    table.delete('rk_01', ['cf1:username'])
    # Scan the table to verify the delete
    for key, value in table.scan():
        print(key, value)
    # Close the connection
    connection.close()
def deletetable():
    # Open a connection to HBase
    connection = happybase.Connection('app-11')
    # Disable and then drop the 'user' table
    connection.delete_table('user', disable=True)
    print(connection.tables())
    # Close the connection
    connection.close()
connectHBase()
createTable()
scanQuery()
insertData()
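The remaining functions (getQuery(), deleteData(), deletetable()) can be called in the same way. When more than a handful of rows need to be written, happybase also provides batched mutations and a connection pool; the sketch below is illustrative only, assuming the same host 'app-11' and the 'user' table with the 'cf1' family from the example above, and the row keys and values are made up for the demonstration.

def batchInsert():
    # A connection pool avoids re-opening a Thrift connection for every call
    pool = happybase.ConnectionPool(size=3, host='app-11')
    with pool.connection() as connection:
        table = connection.table('user')
        # Mutations are buffered client-side and sent when the 'with' block
        # exits (or whenever batch_size pending puts accumulate)
        with table.batch(batch_size=100) as batch:
            for i in range(5):
                batch.put('rk_%02d' % i, {'cf1:address': 'beijing'})

Running batchInsert() followed by scanQuery() should show the newly written rows; note that scan results come back as bytes keys and values in Python 3.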
For more detailed material, see the Spark快速大数据处理 course, or search for Spark余海峰.