Compile and run these programs by following the same procedure used for the Hadoop examples.
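One extra point worth noting: besides the Hadoop jars, the HBase jar and the directory containing hbase-site.xml must also be on the classpath, otherwise HBaseConfiguration has no cluster settings to read.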
CreateTable (run it with a table name and at least one column family as arguments)
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.util.GenericOptionsParser;
public class CreateTable
{
public static void createHBaseTable(String [] str) throws IOException
{
String tablename = str[0];
// HTableDescriptor describes the table's properties
HTableDescriptor htd = new HTableDescriptor(tablename);
for(int i=1; i < str.length ; i++)
{
// column families are added to the HTableDescriptor with addFamily()
htd.addFamily(new HColumnDescriptor(str[i]));
}
// HBaseConfiguration picks up the settings from hbase-site.xml
HBaseConfiguration config = new HBaseConfiguration();
// table administration (create, delete, ...) is done through HBaseAdmin
HBaseAdmin admin = new HBaseAdmin(config);
// check whether the table already exists
if (admin.tableExists(tablename))
{
System.out.println("Table: " + tablename + "Existed.");
}
else
{
// create the table
admin.createTable(htd);
System.out.println( tablename + " created.");
}
}
public static void main(String [] argv) throws IOException
{
System.out.println("create table:"+ argv[0]);
createHBaseTable(argv);
}
}
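For example, running CreateTable with the arguments scores grade course would create a table named scores containing the two column families grade and course (the names here are only sample values).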
GetColumn (run it with tablename, rowkey, family, and column as arguments)
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.GenericOptionsParser;
public class GetColumn
{
public static String getColumn(String tablename,String row,String family,String column)
{
HBaseConfiguration conf = new HBaseConfiguration();
String ret = "";
try
{
HTable table = new HTable(conf, Bytes.toBytes(tablename));
Get g = new Get(Bytes.toBytes(row));
Result rowResult = table.get(g);
ret = Bytes.toString(rowResult.getValue(Bytes.toBytes(family), Bytes.toBytes(column)));
table.close();
}
catch(IOException e)
{
e.printStackTrace();
}
return ret;
}
public static void main(String [] argv)throws IOException
{
System.out.println(getColumn(argv[0],argv[1],argv[2],argv[3]));
}
}
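For example, GetColumn scores row1 grade math would print the value stored in column grade:math of the row whose key is row1 (sample names only).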
ScanColumn (run it with tablename, family, and column as arguments)
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.GenericOptionsParser;
public class ScanColumn
{
public static void scanColumn(String tablename,String family,String column)
{
HBaseConfiguration conf = new HBaseConfiguration();
try
{
HTable table = new HTable(conf, Bytes.toBytes(tablename));
ResultScanner scanner = table.getScanner(Bytes.toBytes(family));
int i=1;
for (Result rowResult : scanner)
{
byte[] by = rowResult.getValue(Bytes.toBytes(family), Bytes.toBytes(column));
String str = Bytes.toString(by);
System.out.println("row " + i + " is \"" + str + "\"");
i++;
}
scanner.close();
table.close();
}
catch(IOException e)
{
e.printStackTrace();
}
}
public static void main(String [] argv)
{
scanColumn(argv[0],argv[1],argv[2]);
}
}
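The scanner above asks the region servers for every column in the given family and then picks out one column on the client side. If only a single column is needed, a Scan object can narrow the request before it is sent. The following is a minimal sketch of that variation, using the same old-style client API as the examples above (the class name ScanOneColumn is only for illustration):
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
// illustrative sketch: scans only one family:column instead of the whole family
public class ScanOneColumn
{
public static void scanOneColumn(String tablename,String family,String column) throws IOException
{
HBaseConfiguration conf = new HBaseConfiguration();
HTable table = new HTable(conf, tablename);
// Scan.addColumn() tells the servers to return only this family:column
Scan scan = new Scan();
scan.addColumn(Bytes.toBytes(family), Bytes.toBytes(column));
ResultScanner scanner = table.getScanner(scan);
int i=1;
for (Result rowResult : scanner)
{
String str = Bytes.toString(rowResult.getValue(Bytes.toBytes(family), Bytes.toBytes(column)));
System.out.println("row " + i + " is \"" + str + "\"");
i++;
}
scanner.close();
table.close();
}
public static void main(String [] argv) throws IOException
{
scanOneColumn(argv[0],argv[1],argv[2]);
}
}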
PutData (run it with tablename, rowkey, family, column, and value as arguments)
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.GenericOptionsParser;
public class PutData
{
public static void putData(String tablename,String row,String family,String column,String value)
throws IOException
{
// HBaseConfiguration picks up the settings from hbase-site.xml
HBaseConfiguration config = new HBaseConfiguration();
HTable table = new HTable(config, tablename);
byte[] brow = Bytes.toBytes(row);
byte[] bfamily = Bytes.toBytes(family);
byte[] bcolumn = Bytes.toBytes(column);
byte[] bvalue = Bytes.toBytes(value);
Put p = new Put(brow);
p.add(bfamily, bcolumn, bvalue);
table.put(p);
System.out.println("輸入資料:"+value+" 至Table:"+tablename+" "+family+":"+column);
table.close();
}
public static void main(String [] argv)throws IOException
{
putData(argv[0],argv[1],argv[2],argv[3],argv[4]);
}
}
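PutData writes a single cell per run. A Put object can carry any number of family:column/value pairs for the same row key, so several columns can be written in one round trip. A minimal sketch of that idea follows (the class name PutRow and its argument layout are only for illustration):
import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
// illustrative sketch: writes several columns of one row with a single Put
// arguments: tablename rowkey family column1 value1 [column2 value2 ...]
public class PutRow
{
public static void putRow(String tablename,String row,String family,String [] columns,String [] values) throws IOException
{
HBaseConfiguration config = new HBaseConfiguration();
HTable table = new HTable(config, tablename);
Put p = new Put(Bytes.toBytes(row));
// each add() appends one more family:column/value pair to the same Put
for(int i=0; i < columns.length; i++)
{
p.add(Bytes.toBytes(family), Bytes.toBytes(columns[i]), Bytes.toBytes(values[i]));
}
table.put(p);
table.close();
}
public static void main(String [] argv) throws IOException
{
String [] columns = new String[(argv.length - 3) / 2];
String [] values = new String[columns.length];
for(int i=0; i < columns.length; i++)
{
columns[i] = argv[3 + 2*i];
values[i] = argv[4 + 2*i];
}
putRow(argv[0],argv[1],argv[2],columns,values);
}
}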
DropTable (run it with tablename as the argument)
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.util.GenericOptionsParser;
public class DropTable
{
static void drop(String tablename)
{
HBaseConfiguration conf = new HBaseConfiguration();
HBaseAdmin admin;
try
{
admin = new HBaseAdmin(conf);
if(admin.tableExists(tablename))
{
admin.disableTable(tablename);
admin.deleteTable(tablename);
System.out.println("Droped the table [" + tablename + "]");
}
else {System.out.println("Table [" + tablename + "] was not found!");}
}
catch(IOException e)
{
e.printStackTrace();
}
}
public static void main (String [] argv)
{
drop(argv[0]);
}
}
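Note that an HBase table must be disabled before it can be deleted, which is why drop() calls disableTable() before deleteTable(). For example, running DropTable scores would remove the sample table created earlier.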