package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class AdvancedCreateTableExample {

    // Print the start and end keys of every region of the given table.
    public static void printRegion(String tableName) throws Exception {
        System.out.println("tablename:" + tableName);
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, tableName);
        Pair<byte[][], byte[][]> pair = table.getStartEndKeys();
        byte[][] firsts = pair.getFirst();
        for (int i = 0; i < firsts.length; i++) {
            if (firsts[i].length == 8) {
                // Keys generated from longs are 8 bytes wide, so print them as longs.
                System.out.println(Bytes.toLong(firsts[i]) + "--------" + Bytes.toLong(pair.getSecond()[i]));
            } else {
                System.out.println(Bytes.toStringBinary(firsts[i]) + "--------" + Bytes.toStringBinary(pair.getSecond()[i]));
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Pre-split "test13" into 10 regions with boundaries spread evenly between 1L and 100L.
        HTableDescriptor tabDesc = new HTableDescriptor(Bytes.toBytes("test13"));
        HColumnDescriptor colDesc = new HColumnDescriptor("fam1");
        tabDesc.addFamily(colDesc);
        admin.createTable(tabDesc, Bytes.toBytes(1L), Bytes.toBytes(100L), 10);
        printRegion("test13");

        // Pre-split "test14" at the explicit split keys A, B, C, D (five regions).
        HTableDescriptor tabDesc2 = new HTableDescriptor(Bytes.toBytes("test14"));
        HColumnDescriptor colDesc2 = new HColumnDescriptor("fam1");
        tabDesc2.addFamily(colDesc2);
        byte[][] splits = new byte[][] { Bytes.toBytes("A"), Bytes.toBytes("B"), Bytes.toBytes("C"), Bytes.toBytes("D") };
        admin.createTable(tabDesc2, splits);
        printRegion("test14");
    }
}
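The HTable and HBaseAdmin classes used above belong to the older client API. In HBase 1.0 and later, the same region-boundary listing is available through a Connection and a RegionLocator. The following is a minimal sketch under that assumption (the class name RegionBoundaryExample is made up for illustration):

package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class RegionBoundaryExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             RegionLocator locator = connection.getRegionLocator(TableName.valueOf("test13"))) {
            // getStartEndKeys() returns the same Pair of start/end key arrays that HTable used to.
            Pair<byte[][], byte[][]> pair = locator.getStartEndKeys();
            byte[][] starts = pair.getFirst();
            for (int i = 0; i < starts.length; i++) {
                System.out.println(Bytes.toStringBinary(starts[i]) + "--------"
                        + Bytes.toStringBinary(pair.getSecond()[i]));
            }
        }
    }
}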
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTableExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Create table "test2" with a single column family "col1".
        HTableDescriptor tableDesc = new HTableDescriptor("test2");
        HColumnDescriptor colDesc = new HColumnDescriptor("col1");
        tableDesc.addFamily(colDesc);
        admin.createTable(tableDesc);
        System.out.println(admin.isTableAvailable("test2"));
    }
}
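HTableDescriptor, HColumnDescriptor, and HBaseAdmin are deprecated or removed in newer HBase releases. A rough equivalent of the table creation above using the builder-based 2.x API might look like the following sketch (the class name ModernCreateTableExample is hypothetical):

package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class ModernCreateTableExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
            // Build the descriptor for "test2" with one column family "col1".
            TableDescriptor desc = TableDescriptorBuilder
                    .newBuilder(TableName.valueOf("test2"))
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("col1"))
                    .build();
            admin.createTable(desc);
            System.out.println(admin.isTableAvailable(TableName.valueOf("test2")));
        }
    }
}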
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class DdlExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Initial state of the table.
        System.out.println("Available:" + admin.isTableAvailable("Test9"));
        System.out.println("Enable:" + admin.isTableEnabled("Test9"));
        System.out.println("Disable:" + admin.isTableDisabled("Test9"));
        System.out.println("------------");

        // A table must be disabled before it can be deleted.
        admin.disableTable("Test9");
        System.out.println("Available:" + admin.isTableAvailable("Test9"));
        System.out.println("Enable:" + admin.isTableEnabled("Test9"));
        System.out.println("Disable:" + admin.isTableDisabled("Test9"));
        System.out.println("------------");

        admin.deleteTable("Test9");
        System.out.println("Available:" + admin.isTableAvailable("Test9"));
        System.out.println("------------");
        System.out.println("exist:" + admin.tableExists("Test9"));
    }
}
package example;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");

        // Build three filters: row key contains "6", qualifier does not contain "6",
        // and cell value is less than "Value8".
        List<Filter> filterList = new ArrayList<Filter>();
        Filter rowFilter = new RowFilter(CompareOp.EQUAL, new SubstringComparator("6"));
        filterList.add(rowFilter);
        Filter columnFilter = new QualifierFilter(CompareOp.NOT_EQUAL, new SubstringComparator("6"));
        filterList.add(columnFilter);
        Filter valueFilter = new ValueFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("Value8")));
        filterList.add(valueFilter);

        // Default operator is MUST_PASS_ALL: a cell must satisfy every filter in the list.
        FilterList filters = new FilterList(filterList);
        Scan scan = new Scan();
        scan.setFilter(filters);
        ResultScanner resultScanner = table.getScanner(scan);
        for (Result result : resultScanner) {
            for (KeyValue kv : result.raw()) {
                System.out.println(kv + "--------" + Bytes.toString(kv.getValue()));
            }
        }
        resultScanner.close();
        System.out.println("------------");

        // MUST_PASS_ONE: a cell is returned if it satisfies at least one of the filters.
        filters = new FilterList(Operator.MUST_PASS_ONE, filterList);
        scan.setFilter(filters);
        resultScanner = table.getScanner(scan);
        for (Result result : resultScanner) {
            for (KeyValue kv : result.raw()) {
                System.out.println(kv + "--------" + Bytes.toString(kv.getValue()));
            }
        }
        resultScanner.close();
    }
}
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");

        // Atomically add 1, then 0, then -1 to the counter cell Row9/Family1:Inc.
        long l = table.incrementColumnValue(Bytes.toBytes("Row9"), Bytes.toBytes("Family1"), Bytes.toBytes("Inc"), 1);
        System.out.println(l);
        l = table.incrementColumnValue(Bytes.toBytes("Row9"), Bytes.toBytes("Family1"), Bytes.toBytes("Inc"), 0);
        System.out.println(l);
        l = table.incrementColumnValue(Bytes.toBytes("Row9"), Bytes.toBytes("Family1"), Bytes.toBytes("Inc"), -1);
        System.out.println(l);
    }
}
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

public class ListTableExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // List every table in the cluster.
        HTableDescriptor[] descList = admin.listTables();
        for (HTableDescriptor desc : descList) {
            System.out.println(Bytes.toString(desc.getName()));
        }

        // Fetch the descriptor of a single table by name.
        HTableDescriptor desc = admin.getTableDescriptor(Bytes.toBytes("testtable"));
        System.out.println(Bytes.toString(desc.getName()));
    }
}
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

public class ModifyTableExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Create table "Modify" with one column family.
        HTableDescriptor desc = new HTableDescriptor("Modify");
        HColumnDescriptor colDesc = new HColumnDescriptor("fam1");
        desc.addFamily(colDesc);
        admin.createTable(desc);

        // Add a second column family; the table must be disabled before modifyTable.
        HTableDescriptor desc2 = admin.getTableDescriptor(Bytes.toBytes("Modify"));
        HColumnDescriptor colDesc2 = new HColumnDescriptor("fami2");
        desc2.addFamily(colDesc2);
        admin.disableTable("Modify");
        admin.modifyTable(Bytes.toBytes("Modify"), desc2);
        admin.enableTable("Modify");

        // Verify the change by re-reading the descriptor.
        HTableDescriptor desc3 = admin.getTableDescriptor(Bytes.toBytes("Modify"));
        System.out.println("is equal:" + desc2.equals(desc3));
        System.out.println(desc3);
    }
}
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;

public class ModifyTableExample2 {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Drop the column family "fam1"; the table must be disabled first.
        admin.enableTable("Test8");
        admin.disableTable("Test8");
        admin.deleteColumn("Test8", "fam1");
        admin.enableTable("Test8");
        System.out.println(admin.getTableDescriptor(Bytes.toBytes("Test8")));

        // Add the column family back (again requires a disabled table).
        admin.disableTable("Test8");
        admin.addColumn("Test8", new HColumnDescriptor("fam1"));
        System.out.println(admin.getTableDescriptor(Bytes.toBytes("Test8")));
    }
}
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiIncrementExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");

        // Increment several counters in the same row with a single atomic call.
        Increment increment = new Increment(Bytes.toBytes("Row9"));
        increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc"), 0);
        increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc1"), -1);
        increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc2"), 2);
        Result result = table.increment(increment);
        for (KeyValue kv : result.raw()) {
            System.out.println(kv + "-----" + Bytes.toLong(kv.getValue()));
        }
    }
}
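Result.raw() and KeyValue are gone from the HBase 2.x client, although the multi-column increment itself works the same way. A sketch of the equivalent call against the newer Table/Cell API (the class name ModernMultiIncrementExample is hypothetical):

package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ModernMultiIncrementExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("testtable"))) {
            // Same atomic multi-column increment as above.
            Increment increment = new Increment(Bytes.toBytes("Row9"));
            increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc"), 0);
            increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc1"), -1);
            increment.addColumn(Bytes.toBytes("Family1"), Bytes.toBytes("Inc2"), 2);
            Result result = table.increment(increment);
            for (Cell cell : result.rawCells()) {
                System.out.println(cell + "-----" + Bytes.toLong(CellUtil.cloneValue(cell)));
            }
        }
    }
}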
package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;

public class TablePoolExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();

        // Pool that keeps at most 5 HTable instances per table.
        HTablePool pool = new HTablePool(conf, 5);
        HTableInterface[] inter = new HTableInterface[10];
        for (int i = 0; i < inter.length; i++) {
            inter[i] = pool.getTable("TestTable");
        }
        // Return every handle to the pool, then close the pool for this table.
        for (int i = 0; i < inter.length; i++) {
            pool.putTable(inter[i]);
        }
        pool.closeTablePool("TestTable");
    }
}
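HTablePool was deprecated and later removed; current clients share a single Connection, which does its own pooling, and obtain lightweight Table handles from it as needed. A minimal sketch under that assumption (the class name ConnectionPoolingExample is made up for illustration):

package example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

public class ConnectionPoolingExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // One heavyweight Connection per process; Table handles are cheap and short-lived.
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            for (int i = 0; i < 10; i++) {
                try (Table table = connection.getTable(TableName.valueOf("TestTable"))) {
                    // Use the table here; try-with-resources releases the handle.
                }
            }
        }
    }
}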