Commit 17fd3674 by haozi

HBase starter

parent 589d5ff3
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>matrix-bigdata</artifactId>
<groupId>com.secoo.mall</groupId>
<version>1.3.2.RELEASE</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>matrix-bigdata-demo</artifactId>
<dependencies>
<!--lombok-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>com.secoo.mall</groupId>
<artifactId>logger-starter</artifactId>
</dependency>
<dependency>
<groupId>com.secoo.mall</groupId>
<artifactId>matrix-bigdata-hbase-starter</artifactId>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package mall;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
@Slf4j
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
log.info("matrix-bigdata-demo SpringBoot Start Success");
}
}
package mall.hbase;
import com.secoo.mall.hbase.spring.boot.autoconfigure.HbaseTemplate;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Simple HBase operations demo
*
* @author zhanghao
* @date 2020-03-17 19:20
*/
@Component
@Slf4j
public class SimpleHBase implements InitializingBean {
@Resource
private HbaseTemplate hbaseTemplate;
private static final String TABLE_NAME = "mytable";
private static final String CF_DEFAULT = "cf";
public static final byte[] QUALIFIER = "col1".getBytes();
private static final byte[] ROWKEY = "rowkey1".getBytes();
/**
* Create the table
*
* @throws IOException
*/
public void createTable() throws IOException {
List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
columnFamilyDescriptors.add(ColumnFamilyDescriptorBuilder.of(CF_DEFAULT));
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE_NAME))
.setColumnFamilies(columnFamilyDescriptors).build();
log.info("Creating table.");
// Close the Admin after use and skip creation if the table already exists,
// since afterPropertiesSet() runs this on every startup.
try (Admin admin = hbaseTemplate.getConnection().getAdmin()) {
if (!admin.tableExists(TableName.valueOf(TABLE_NAME))) {
admin.createTable(tableDescriptor);
}
}
log.info("Done.");
}
/**
* Save or update a row
*/
public void put() {
Put put = new Put(ROWKEY);
put.addColumn(CF_DEFAULT.getBytes(), QUALIFIER, "this is value".getBytes());
hbaseTemplate.saveOrUpdate(TABLE_NAME, put);
}
/**
* Read a row
*/
public void get() {
hbaseTemplate.get(TABLE_NAME, "rowkey1", CF_DEFAULT, (result, rowNum) -> {
Map<String, String> map = new HashMap<>();
for (Cell cell : result.rawCells()) {
log.info("Row key: " + new String(CellUtil.cloneRow(cell)));
log.info("Column family: " + new String(CellUtil.cloneFamily(cell)));
log.info("Qualifier: " + new String(CellUtil.cloneQualifier(cell)));
log.info("Value: " + new String(CellUtil.cloneValue(cell)));
log.info("Timestamp: " + cell.getTimestamp());
map.put(new String(CellUtil.cloneQualifier(cell)), new String(CellUtil.cloneValue(cell)));
}
return map;
});
}
@Override
public void afterPropertiesSet() throws Exception {
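// Create the demo table at startup; uncomment put()/get() below to exercise writes and reads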
createTable();
// put();
// get();
}
}
spring:
  application:
    name: matrix-bigdata-demo
server:
  port: 6080
hbase:
  zookeeper:
    quorum: 10.0.255.184:2181,10.0.255.185:2181,10.0.255.186:2181
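# hbase.zookeeper.quorum is read by HbaseAutoConfiguration via the Spring Environment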
@@ -11,5 +11,23 @@
<artifactId>matrix-bigdata-hbase-starter</artifactId>
<dependencies>
<!-- hbase -->
<dependency>
<groupId>com.aliyun.hbase</groupId>
<artifactId>alihbase-client</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-source-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
\ No newline at end of file
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.core.env.Environment;
import org.springframework.util.Assert;
@org.springframework.context.annotation.Configuration
@ConditionalOnClass({HbaseTemplate.class, Configuration.class})
public class HbaseAutoConfiguration {
// @Bean
// @ConditionalOnMissingBean(Configuration.class)
// public Configuration hBaseConfiguration(Environment env) {
// String zookeeperQuorum = env.getProperty("hbase.zookeeper.quorum");
// String zookeeperClientPort = env.getProperty("hbase.zookeeper.property.clientPort");
// String zookeeperZnodeParent = env.getProperty("hbase.zookeeper.znode.parent");
// String clientRetriesNumber = env.getProperty("hbase.client.retries.number");
// String threadsMax = env.getProperty("hbase.hconnection.threads.max");
// String threadsKeepalivetime = env.getProperty("hbase.hconnection.threads.keepalivetime");
// String clientMaxTotalTasks = env.getProperty("hbase.client.max.total.tasks");
//
// Assert.notNull(zookeeperQuorum, "zk address is null!!!");
// Assert.notNull(zookeeperClientPort, "zk port is null!!!");
//
// Configuration configuration = HBaseConfiguration.create();
// configuration.set(HConstants.ZOOKEEPER_QUORUM, zookeeperQuorum);
// configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, zookeeperClientPort);
// if (StringUtils.isNotEmpty(zookeeperZnodeParent))
// configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, zookeeperZnodeParent);
// if (StringUtils.isNotEmpty(clientRetriesNumber))
// configuration.set(HConstants.HBASE_CLIENT_RETRIES_NUMBER, clientRetriesNumber);
// if (threadsMax != null) configuration.set("hbase.hconnection.threads.max", threadsMax);
// if (threadsKeepalivetime != null)
// configuration.set("hbase.hconnection.threads.keepalivetime", threadsKeepalivetime);
// if (clientMaxTotalTasks != null)
// configuration.set("hbase.config.hbase.client.max.total.tasks", clientMaxTotalTasks);
// return configuration;
// }
@Bean
@ConditionalOnMissingBean(Configuration.class)
public Configuration hBaseConfiguration(Environment env) {
String zookeeperQuorum = env.getProperty("hbase.zookeeper.quorum");
Assert.notNull(zookeeperQuorum, "zk address is null!!!");
Configuration configuration = HBaseConfiguration.create();
configuration.set(HConstants.ZOOKEEPER_QUORUM, zookeeperQuorum);
return configuration;
}
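// Builds the HbaseTemplate from the Configuration above; applications can override it by defining their own HbaseTemplate bean.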
@Bean
@ConditionalOnMissingBean(HbaseTemplate.class)
public HbaseTemplate hbaseTemplate(Configuration configuration) {
return new HbaseTemplate(configuration);
}
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Scan;
import java.util.List;
public interface HbaseOperations {
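/**
* Executes the given callback against the specified table.
*
* @param tableName target table
* @param mapper table callback
* @param <T> result type
* @return the callback result, or null if none
*/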
<T> T execute(String tableName, TableCallback<T> mapper);
/**
* Scans the target table, using the given column family.
* The content is processed row by row by the given action, returning a list of domain objects.
*
* @param tableName target table
* @param family column family
* @param <T> action type
* @return a list of objects mapping the scanned rows
*/
<T> List<T> find(String tableName, String family, final RowMapper<T> mapper);
/**
* Scans the target table, using the given column family.
* The content is processed row by row by the given action, returning a list of domain objects.
*
* @param tableName target table
* @param family column family
* @param qualifier column qualifier
* @param <T> action type
* @return a list of objects mapping the scanned rows
*/
<T> List<T> find(String tableName, String family, String qualifier, final RowMapper<T> mapper);
/**
* Scans the target table using the given {@link Scan} object. Suitable for maximum control over the scanning
* process.
* The content is processed row by row by the given action, returning a list of domain objects.
*
* @param tableName target table
* @param scan table scanner
* @param <T> action type
* @return a list of objects mapping the scanned rows
*/
<T> List<T> find(String tableName, final Scan scan, final RowMapper<T> mapper);
/**
* Gets an individual row from the given table. The content is mapped by the given action.
*
* @param tableName target table
* @param rowName row name
* @param mapper row mapper
* @param <T> mapper type
* @return object mapping the target row
*/
<T> T get(String tableName, String rowName, final RowMapper<T> mapper);
/**
* Gets an individual row from the given table. The content is mapped by the given action.
*
* @param tableName target table
* @param rowName row name
* @param familyName column family
* @param mapper row mapper
* @param <T> mapper type
* @return object mapping the target row
*/
<T> T get(String tableName, String rowName, String familyName, final RowMapper<T> mapper);
/**
* Gets an individual row from the given table. The content is mapped by the given action.
*
* @param tableName target table
* @param rowName row name
* @param familyName family
* @param qualifier column qualifier
* @param mapper row mapper
* @param <T> mapper type
* @return object mapping the target row
*/
<T> T get(String tableName, final String rowName, final String familyName, final String qualifier, final RowMapper<T> mapper);
/**
* Executes puts, updates, or deletes through a {@link MutatorCallback}.
*
* @param tableName target table
* @param action mutator callback
*/
void execute(String tableName, MutatorCallback action);
/**
* @param tableName target table
* @param mutation the mutation (put or delete) to apply
*/
void saveOrUpdate(String tableName, Mutation mutation);
/**
* @param tableName target table
* @param mutations the mutations (puts or deletes) to apply
*/
void saveOrUpdates(String tableName, List<Mutation> mutations);
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
public class HbaseSystemException extends RuntimeException {
public HbaseSystemException(Exception cause) {
super(cause.getMessage(), cause);
}
public HbaseSystemException(Throwable throwable) {
super(throwable.getMessage(), throwable);
}
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.util.Assert;
import org.springframework.util.StopWatch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class HbaseTemplate implements HbaseOperations, DisposableBean {
private static final Logger LOGGER = LoggerFactory.getLogger(HbaseTemplate.class);
private Configuration configuration;
private volatile Connection connection;
public HbaseTemplate(Configuration configuration) {
Assert.notNull(configuration, "a valid configuration is required");
this.setConfiguration(configuration);
}
@Override
public <T> T execute(String tableName, TableCallback<T> action) {
Assert.notNull(action, "Callback object must not be null");
Assert.notNull(tableName, "No table specified");
StopWatch sw = new StopWatch();
sw.start();
Table table = null;
try {
table = this.getConnection().getTable(TableName.valueOf(tableName));
return action.doInTable(table);
} catch (Throwable throwable) {
throw new HbaseSystemException(throwable);
} finally {
if (null != table) {
try {
table.close();
sw.stop();
} catch (IOException e) {
LOGGER.error("hbase资源释放失败");
}
}
}
}
@Override
public <T> List<T> find(String tableName, String family, final RowMapper<T> action) {
Scan scan = new Scan();
scan.setCaching(5000);
scan.addFamily(Bytes.toBytes(family));
return this.find(tableName, scan, action);
}
@Override
public <T> List<T> find(String tableName, String family, String qualifier, final RowMapper<T> action) {
Scan scan = new Scan();
scan.setCaching(5000);
scan.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier));
return this.find(tableName, scan, action);
}
@Override
public <T> List<T> find(String tableName, final Scan scan, final RowMapper<T> action) {
return this.execute(tableName, new TableCallback<List<T>>() {
@Override
public List<T> doInTable(Table table) throws Throwable {
int caching = scan.getCaching();
// If caching was not set (the default is 1), default it to 5000
if (caching == 1) {
scan.setCaching(5000);
}
ResultScanner scanner = table.getScanner(scan);
try {
List<T> rs = new ArrayList<>();
int rowNum = 0;
for (Result result : scanner) {
rs.add(action.mapRow(result, rowNum++));
}
return rs;
} finally {
scanner.close();
}
}
});
}
@Override
public <T> T get(String tableName, String rowName, final RowMapper<T> mapper) {
return this.get(tableName, rowName, null, null, mapper);
}
@Override
public <T> T get(String tableName, String rowName, String familyName, final RowMapper<T> mapper) {
return this.get(tableName, rowName, familyName, null, mapper);
}
@Override
public <T> T get(String tableName, final String rowName, final String familyName, final String qualifier, final RowMapper<T> mapper) {
return this.execute(tableName, new TableCallback<T>() {
@Override
public T doInTable(Table table) throws Throwable {
Get get = new Get(Bytes.toBytes(rowName));
if (StringUtils.isNotBlank(familyName)) {
byte[] family = Bytes.toBytes(familyName);
if (StringUtils.isNotBlank(qualifier)) {
get.addColumn(family, Bytes.toBytes(qualifier));
} else {
get.addFamily(family);
}
}
Result result = table.get(get);
return mapper.mapRow(result, 0);
}
});
}
@Override
public void execute(String tableName, MutatorCallback action) {
Assert.notNull(action, "Callback object must not be null");
Assert.notNull(tableName, "No table specified");
StopWatch sw = new StopWatch();
sw.start();
BufferedMutator mutator = null;
try {
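// Buffer mutations client-side with a 3 MB write buffer; the mutator is flushed and closed in the finally block.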
BufferedMutatorParams mutatorParams = new BufferedMutatorParams(TableName.valueOf(tableName));
mutator = this.getConnection().getBufferedMutator(mutatorParams.writeBufferSize(3 * 1024 * 1024));
action.doInMutator(mutator);
} catch (Throwable throwable) {
sw.stop();
throw new HbaseSystemException(throwable);
} finally {
if (null != mutator) {
try {
mutator.flush();
mutator.close();
sw.stop();
} catch (IOException e) {
LOGGER.error("hbase mutator资源释放失败");
}
}
}
}
@Override
public void saveOrUpdate(String tableName, final Mutation mutation) {
this.execute(tableName, mutator -> {
mutator.mutate(mutation);
});
}
@Override
public void saveOrUpdates(String tableName, final List<Mutation> mutations) {
this.execute(tableName, mutator -> {
mutator.mutate(mutations);
});
}
public void setConnection(Connection connection) {
this.connection = connection;
}
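/**
* Lazily creates the shared HBase Connection using double-checked locking. The Connection is
* heavyweight and thread-safe, so a single instance is reused and closed in destroy().
*/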
public Connection getConnection() {
if (null == this.connection) {
synchronized (this) {
if (null == this.connection) {
try {
this.connection = ConnectionFactory.createConnection(configuration);
} catch (IOException e) {
LOGGER.error("hbase connection资源池创建失败");
}
}
}
}
return this.connection;
}
public Configuration getConfiguration() {
return configuration;
}
public void setConfiguration(Configuration configuration) {
this.configuration = configuration;
}
@Override
public void destroy() throws Exception {
if (this.getConnection() != null) {
try {
this.getConnection().close();
} catch (IOException e) {
LOGGER.error("hbase connection关闭异常!!!", e);
}
}
}
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.hadoop.hbase.client.BufferedMutator;
public interface MutatorCallback {
/**
* Applies puts and deletes through the BufferedMutator API.
*
* @param mutator active buffered mutator
* @throws Throwable thrown by the HBase API
*/
void doInMutator(BufferedMutator mutator) throws Throwable;
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.hadoop.hbase.client.Result;
public interface RowMapper<T> {
T mapRow(Result result, int rowNum) throws Exception;
}
package com.secoo.mall.hbase.spring.boot.autoconfigure;
import org.apache.hadoop.hbase.client.Table;
public interface TableCallback<T> {
/**
* Gets called by {@link HbaseTemplate} execute with an active Hbase table. Does not need to care about activating or closing down the table.
*
* @param table active Hbase table
* @return a result object, or null if none
* @throws Throwable thrown by the Hbase API
*/
T doInTable(Table table) throws Throwable;
}
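# spring.factories entry: registers HbaseAutoConfiguration for Spring Boot auto-configuration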
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
com.secoo.mall.hbase.spring.boot.autoconfigure.HbaseAutoConfiguration
\ No newline at end of file
@@ -14,6 +14,7 @@
<modules>
<module>matrix-bigdata-hbase-starter</module>
<module>matrix-bigdata-spark-starter</module>
<module>matrix-bigdata-demo</module>
</modules>
......
@@ -231,6 +231,12 @@
<artifactId>apollo-client</artifactId>
<version>1.4.0</version>
</dependency>
<!-- hbase -->
<dependency>
<groupId>com.aliyun.hbase</groupId>
<artifactId>alihbase-client</artifactId>
<version>2.0.3</version>
</dependency>
</dependencies>
......