时间:2021-07-01 10:21:17 帮助过:45人阅读
Here you have a more complex filter that does not simply filter out data based on
directly available information. Rather, it lets you specify a dependent column—or
reference column—that controls how other columns are filtered. It uses the timestamp
of the reference column and includes all other columns that have the same timestamp.
尝试找到该列所在的每一行,并返回该行具有相同时间戳的全部键值对。如果某一行不包含指定的列,则该行的任何键值对都不返回。
如果dropDependentColumn=true,则从属列不返回。
via: http://abloz.com/2012/08/22/the-hbases-content-query-2.html
package com.fatkun.filter.comparison; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.DependentColumnFilter; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.SubstringComparator; import org.apache.hadoop.hbase.filter.ValueFilter; import org.apache.hadoop.hbase.filter.WritableByteArrayComparable; import org.apache.hadoop.hbase.util.Bytes; public class TestHbaseDependentColumnFilter { String tableName = "test_value_filter"; Configuration config = HBaseConfiguration.create(); public void filter(boolean drop, CompareFilter.CompareOp operator, WritableByteArrayComparable comparator) throws IOException { HTable table = new HTable(config, tableName); // Filter filter; if (comparator != null) { // drop为true时,filter表示对"col1"列以外的所有"data1"列族数据做filter操作 // drop为false时,表示对所有"data1"列族的数据做filter操作 filter = new DependentColumnFilter(Bytes.toBytes("data1"), Bytes.toBytes("col1"), drop, operator, comparator); } else { filter = new DependentColumnFilter(Bytes.toBytes("data1"), Bytes.toBytes("col1"), drop); } // filter应用于scan Scan scan = new Scan(); scan.setFilter(filter); ResultScanner scanner = table.getScanner(scan); for (Result result : scanner) { for (KeyValue kv : result.list()) { System.out.println("kv=" + kv.toString() + ",value=" + 
Bytes.toString(kv.getValue())); } } scanner.close(); table.close(); } /** * 部分代码来自hbase权威指南 * * @throws IOException */ public void testFilter() throws IOException { // The dropDependentColumn parameter is giving you additional control // over how the reference column is handled: it is either included or // dropped by the filter // 1.获取整个"data1"列族当前Version中的所有timestamp等于参照列"data1:col1"的数据 System.out.println("drop=false"); filter(false, CompareFilter.CompareOp.NO_OP, null); // 2.获取除了"col1"列以外的"data1"列族中的所有timestamp等于参照列"data1:col1"的数据 System.out.println("drop=true"); filter(true, CompareFilter.CompareOp.NO_OP, null); // 3.获取除了"col1"列以外的"data1"列族当前Version中的所有timestamp等于参照列"data1:col1"的,value以"data100"开头的所有数据 System.out.println("比较"); filter(true, CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator( Bytes.toBytes("data100"))); } /** * 初始化数据 */ public void init() { // 创建表和初始化数据 try { HBaseAdmin admin = new HBaseAdmin(config); if (!admin.tableExists(tableName)) { HTableDescriptor htd = new HTableDescriptor(tableName); HColumnDescriptor hcd1 = new HColumnDescriptor("data1"); htd.addFamily(hcd1); HColumnDescriptor hcd2 = new HColumnDescriptor("data2"); htd.addFamily(hcd2); HColumnDescriptor hcd3 = new HColumnDescriptor("data3"); htd.addFamily(hcd3); admin.createTable(htd); } HTable table = new HTable(config, tableName); table.setAutoFlush(false); int count = 50; for (int i = 1; i <= count; ++i) { Put p = new Put(String.format("row%03d", i).getBytes()); p.add("data1".getBytes(), String.format("col%01d", i % 10) .getBytes(), String.format("data1%03d", i).getBytes()); p.add("data2".getBytes(), String.format("col%01d", i % 10) .getBytes(), String.format("data2%03d", i).getBytes()); p.add("data3".getBytes(), String.format("col%01d", i % 10) .getBytes(), String.format("data3%03d", i).getBytes()); table.put(p); } table.close(); } catch (IOException e) { e.printStackTrace(); } } /** * @param args * @throws IOException */ public static void main(String[] args) throws 
IOException { TestHbaseDependentColumnFilter test = new TestHbaseDependentColumnFilter(); test.init(); test.testFilter(); } }
原文地址:Hbase DependentColumnFilter, 感谢原作者分享。