第一步:首先登录ssh,之前设置了无密码登录,因此这里不需要密码;再切换目录至/usr/local/hadoop ;再启动hadoop
ssh localhost
cd /usr/local/hadoop
./sbin/start-dfs.sh
输入命令jps,能看到NameNode,DataNode和SecondaryNameNode都已经成功启动,表示hadoop启动成功
第二步:切换目录至/usr/local/hbase;再启动HBase.
?
进入shell界面:
?
学生表(Student)
学号(S_No) | 姓名(S_Name) | 性别(S_Sex) | 年龄(S_Age) |
2015001 | Zhangsan | male | 23 |
2015002 | Mary | female | 22 |
2015003 | Lisi | male | 24 |
课程表(Course)
课程号(C_No) | 课程名(C_Name) | 学分(C_Credit) |
123001 | Math | 2.0 |
123002 | Computer Science | 5.0 |
123003 | English | 3.0 |
选课表(SC)
学号(SC_Sno) | 课程号(SC_Cno) | 成绩(SC_Score) |
2015001 | 123001 | 86 |
2015001 | 123003 | 69 |
2015002 | 123002 | 77 |
2015002 | 123003 | 99 |
2015003 | 123001 | 98 |
2015003 | 123002 | 95 |
2. 请编程实现以下功能:
这里只需要导入hbase安装目录中的lib文件中的所有jar包。
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import java.io.IOException;
public class ExampleForHbase {
    public static Configuration configuration; // HBase client configuration
    public static Connection connection;       // shared connection to the cluster
    public static Admin admin;                 // DDL handle (create/delete tables, ...)

    public static void main(String[] args) throws IOException {
        // Create a table named "Score" with two column families: "sname" and "course".
        createTable("Score", new String[]{"sname", "course"});
    }

    /** Opens the connection to HBase and obtains an Admin handle. */
    public static void init() {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Closes the Admin handle and the connection; safe to call when either is null. */
    public static void close() {
        try {
            if (admin != null) {
                admin.close();
            }
            if (null != connection) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a table. HBase rows are addressed by the row key supplied with
     * each put (the first argument after the table name in the shell's put
     * command), so no explicit "id" column family is needed.
     *
     * @param myTableName table name
     * @param colFamily   column family names to create
     * @throws IOException if the DDL operation fails
     */
    public static void createTable(String myTableName, String[] colFamily) throws IOException {
        init(); // open the connection first
        try {
            TableName tableName = TableName.valueOf(myTableName);
            if (admin.tableExists(tableName)) {
                System.out.println("表已经存在!");
            } else {
                // Build the table descriptor with one descriptor per column family.
                HTableDescriptor hTableDescriptor = new HTableDescriptor(tableName);
                for (String str : colFamily) {
                    hTableDescriptor.addFamily(new HColumnDescriptor(str));
                }
                admin.createTable(hTableDescriptor);
                System.out.println("表创建成功!");
            }
        } finally {
            // Release the connection even when tableExists/createTable throws,
            // otherwise the client connection leaks.
            close();
        }
    }
}
package A;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.*;
import java.io.IOException;
public class ExampleForHbase {
    public static Configuration configuration; // HBase client configuration
    public static Connection connection;       // shared connection to the cluster
    public static Admin admin;                 // admin handle (unused here but kept for symmetry)

    /**
     * Inserts one row into the given table. Each fields[i] is a
     * "family:qualifier" column name and is stored with values[i].
     *
     * All cells belong to the same row, so they are batched into a single
     * {@code Put} and sent with one RPC instead of one Put per cell.
     *
     * @param tableName target table
     * @param row       row key
     * @param fields    "family:qualifier" column names, parallel to values
     * @param values    cell values, parallel to fields
     * @throws IOException if the write fails
     */
    public static void addRecord(String tableName, String row, String[] fields, String[] values) throws IOException {
        if (fields.length != values.length) {
            throw new IllegalArgumentException(
                    "fields and values must have the same length: "
                            + fields.length + " != " + values.length);
        }
        init();
        // try-with-resources closes the Table even if the put throws;
        // the outer finally releases the connection.
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Put put = new Put(row.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            for (int i = 0; i < fields.length; i++) {
                // Split only on the first ':' so qualifiers may contain colons.
                String[] cols = fields[i].split(":", 2);
                put.addColumn(
                        cols[0].getBytes(java.nio.charset.StandardCharsets.UTF_8),
                        cols[1].getBytes(java.nio.charset.StandardCharsets.UTF_8),
                        values[i].getBytes(java.nio.charset.StandardCharsets.UTF_8));
            }
            table.put(put); // single RPC for the whole row
            System.out.println("数据已插入!");
        } finally {
            close();
        }
    }

    /** Opens the connection to HBase and obtains an Admin handle. */
    public static void init() {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Closes the Admin handle and the connection; safe to call when either is null. */
    public static void close() {
        try {
            if (admin != null) {
                admin.close();
            }
            if (null != connection) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        // Insert three course scores for row key "ss" into the "course" family.
        String[] fields = {"course:Math", "course:Computer Science", "course:English"};
        String[] values = {"99", "80", "100"};
        try {
            addRecord("Score", "ss", fields, values);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
?
?
package A;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
public class ExampleForHbase {
    public static Configuration configuration; // HBase client configuration
    public static Connection connection;       // shared connection to the cluster
    public static Admin admin;                 // admin handle (unused here but kept for symmetry)

    /** Opens the connection to HBase and obtains an Admin handle. */
    public static void init() {
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.rootdir", "hdfs://localhost:9000/hbase");
        try {
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Closes the Admin handle and the connection; safe to call when either is null. */
    public static void close() {
        try {
            if (admin != null) {
                admin.close();
            }
            if (null != connection) {
                connection.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Scans a table and prints every cell of the requested column.
     *
     * Accepts either a bare family name ("course" — prints the whole family)
     * or "family:qualifier" ("course:Math" — prints just that column).
     *
     * @param tableName table to scan
     * @param column    "family" or "family:qualifier"
     * @throws IOException if the scan fails
     */
    public static void scanColumn(String tableName, String column) throws IOException {
        init();
        try {
            Table table = connection.getTable(TableName.valueOf(tableName));
            try {
                Scan scan = new Scan();
                // Split only on the first ':' so qualifiers may contain colons.
                String[] cols = column.split(":", 2);
                if (cols.length == 1) {
                    scan.addFamily(Bytes.toBytes(cols[0]));
                } else {
                    scan.addColumn(Bytes.toBytes(cols[0]), Bytes.toBytes(cols[1]));
                }
                // try-with-resources: the scanner holds server-side resources
                // and must be closed even when iteration throws.
                try (ResultScanner scanner = table.getScanner(scan)) {
                    for (Result result : scanner) {
                        showCell(result);
                    }
                }
            } finally {
                table.close();
            }
        } finally {
            close(); // release the connection even on failure
        }
    }

    /**
     * Prints every cell of one scan result, one attribute per line.
     *
     * @param result one row returned by the scanner
     */
    public static void showCell(Result result) {
        for (Cell cell : result.rawCells()) {
            // Bytes.toString decodes as UTF-8 regardless of platform charset.
            System.out.println("RowName:" + Bytes.toString(CellUtil.cloneRow(cell)) + " ");
            System.out.println("Timestamp:" + cell.getTimestamp() + " ");
            System.out.println("column Family:" + Bytes.toString(CellUtil.cloneFamily(cell)) + " ");
            System.out.println("Column Name:" + Bytes.toString(CellUtil.cloneQualifier(cell)) + " ");
            System.out.println("value:" + Bytes.toString(CellUtil.cloneValue(cell)) + " ");
        }
    }

    public static void main(String[] args) throws IOException {
        // Print all columns of the "course" family in table "Score".
        scanColumn("Score", "course");
    }
}