Java BinaryPrefixComparator Class Code Examples


This article compiles typical usage examples of the Java class org.apache.hadoop.hbase.filter.BinaryPrefixComparator. If you are wondering what the BinaryPrefixComparator class is for, how to use it, or where to find working examples, the curated samples below should help.



The BinaryPrefixComparator class belongs to the org.apache.hadoop.hbase.filter package. Twenty code examples of the class are shown below, ordered by popularity by default.
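Before the project-specific examples, here is a minimal sketch of the common pattern: BinaryPrefixComparator matches byte values that start with a given prefix, and it is usually wrapped in a RowFilter (or QualifierFilter) attached to a Scan. The sketch assumes an HBase 1.x client on the classpath; the table name "demo_table", column family "cf", and row-key prefix "user-42|" are hypothetical placeholders, not taken from the projects below.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class BinaryPrefixScanSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("demo_table"))) { // hypothetical table
      // Keep only rows whose key begins with the byte prefix "user-42|".
      // CompareOp.EQUAL combined with BinaryPrefixComparator means "the prefix matches".
      RowFilter prefixFilter = new RowFilter(CompareOp.EQUAL,
          new BinaryPrefixComparator(Bytes.toBytes("user-42|")));
      Scan scan = new Scan().addFamily(Bytes.toBytes("cf")).setFilter(prefixFilter); // hypothetical family
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toString(result.getRow()));
        }
      }
    }
  }
}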

Example 1: ByteArrayComparableModel

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public ByteArrayComparableModel(
    ByteArrayComparable comparator) {
  String typeName = comparator.getClass().getSimpleName();
  ComparatorType type = ComparatorType.valueOf(typeName);
  this.type = typeName;
  switch (type) {
    case BinaryComparator:
    case BinaryPrefixComparator:
      this.value = Base64.encodeBytes(comparator.getValue());
      break;
    case BitComparator:
      this.value = Base64.encodeBytes(comparator.getValue());
      this.op = ((BitComparator)comparator).getOperator().toString();
      break;
    case NullComparator:
      break;
    case RegexStringComparator:
    case SubstringComparator:
      this.value = Bytes.toString(comparator.getValue());
      break;
    default:
      throw new RuntimeException("unhandled filter type: " + type);
  }
}
 
Developer: fengchen8086, Project: ditb, Lines: 25, Source: ScannerModel.java


Example 2: makeResponseTimeFilter

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * Builds the HBase filter that selects values on the y-axis (response time) when selecting transactions in the scatter chart.
 * Four bytes of elapsed time must be prepended to the column qualifier in order to use this filter.
 *
 * @param area selected scatter area
 * @param offsetTransactionId transaction id used as the scan offset
 * @param offsetTransactionElapsed elapsed time of the offset transaction
 * @return the composed filter list
 */
private Filter makeResponseTimeFilter(final SelectedScatterArea area, final TransactionId offsetTransactionId, int offsetTransactionElapsed) {
    // filter by response time
    ResponseTimeRange responseTimeRange = area.getResponseTimeRange();
    byte[] responseFrom = Bytes.toBytes(responseTimeRange.getFrom());
    byte[] responseTo = Bytes.toBytes(responseTimeRange.getTo());
    FilterList filterList = new FilterList(Operator.MUST_PASS_ALL);
    filterList.addFilter(new QualifierFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(responseFrom)));
    filterList.addFilter(new QualifierFilter(CompareOp.LESS_OR_EQUAL, new BinaryPrefixComparator(responseTo)));

    // add offset
    if (offsetTransactionId != null) {
        final Buffer buffer = new AutomaticBuffer(32);
        buffer.putInt(offsetTransactionElapsed);
        buffer.putPrefixedString(offsetTransactionId.getAgentId());
        buffer.putSVLong(offsetTransactionId.getAgentStartTime());
        buffer.putVLong(offsetTransactionId.getTransactionSequence());
        byte[] qualifierOffset = buffer.getBuffer();

        filterList.addFilter(new QualifierFilter(CompareOp.GREATER, new BinaryPrefixComparator(qualifierOffset)));
    }
    return filterList;
}
 
Developer: naver, Project: pinpoint, Lines: 32, Source: HbaseApplicationTraceIndexDao.java


Example 3: testAddColumnFilterToScanCompareOpNull

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
@Test
public void testAddColumnFilterToScanCompareOpNull() throws Exception {
  ColumnFilter cf = new ColumnFilter( "Family" );
  cf.setConstant( "123" );
  cf.setSignedComparison( true );

  HBaseValueMeta meta = new HBaseValueMeta( "colFamly,colname,Family", 1, 20, 1 );
  meta.setIsLongOrDouble( true );
  VariableSpace space = mockVariableSpace();
  connectionSpy.m_sourceScan = new Scan();
  doReturn( null ).when( connectionSpy ).getCompareOpByComparisonType( any( ColumnFilter.ComparisonType.class ) );

  connectionSpy.addColumnFilterToScan( cf, meta, space, true );
  FilterList filter = (FilterList) connectionSpy.m_sourceScan.getFilter();
  assertFalse( filter.getFilters().isEmpty() );
  Assert.assertEquals( filter.getFilters().size(), 1 );
  Assert.assertEquals( BinaryPrefixComparator.class,
    ( (CompareFilter) filter.getFilters().get( 0 ) ).getComparator().getClass() );
}
 
Developer: pentaho, Project: pentaho-hadoop-shims, Lines: 20, Source: CommonHBaseConnectionTest.java


Example 4: getRecommendedUserItem

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getRecommendedUserItem(String cp, String collection, long user, Long from, Long startDate,
		Long endDate, int size)
{
	FilterList filters = new FilterList();

	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getStatRecommendedItemKey(
				collection, user))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatRecommendedItemKey(collection, user, from))));
	}

	setDateLimit(STATS_RECOMMENDED_USERITEM, startDate, endDate, filters);

	Scan scan = new Scan().addFamily(STATS_RECOMMENDED_USERITEM).setFilter(filters);

	return getResults(cp, scan, STATS_RECOMMENDED_USERITEM, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 23, Source: StatisticsTable.java


Example 5: getUserRated

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getUserRated(String cp, String collection, Long from, Long startDate, Long endDate, int size)
{
	// Put put = new Put(RowKeys.getStatRatingsPerItemKey(collection, item, date));
	Scan scan = new Scan();
	scan.addFamily(STATS_USER_RATINGS);
	FilterList filters = new FilterList();
	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatRatingsPerUserKey(collection))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatRatingsPerUserKey(collection, from.longValue()))));
	}

	setDateLimit(STATS_USER_RATINGS, startDate, endDate, filters);

	scan.setFilter(filters);

	return getResults(cp, scan, STATS_USER_RATINGS, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 24, Source: StatisticsTable.java


Example 6: getItemRated

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getItemRated(String cp, String collection, Long from, Long startDate, Long endDate, int size)
{
	// Put put = new Put(RowKeys.getStatRatingsPerItemKey(collection, item, date));
	Scan scan = new Scan();
	scan.addFamily(STATS_ITEM_RATINGS);
	FilterList filters = new FilterList();
	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatRatingsPerItemKey(collection))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatRatingsPerItemKey(collection, from.longValue()))));
	}

	setDateLimit(STATS_ITEM_RATINGS, startDate, endDate, filters);

	scan.setFilter(filters);

	return getResults(cp, scan, STATS_ITEM_RATINGS, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 24, Source: StatisticsTable.java


Example 7: getSources

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * Return the list of content providers (sources).
 * @param cp
 * @param recommender
 * @return
 * @see nl.gridline.zieook.statistics.SourcesByRecommenderMap
 * @see nl.gridline.zieook.statistics.SourcesByRecommenderReduce
 */
public List<GroupedData> getSources(String cp, String recommender, String from, Long startDate, Long endDate,
		int size)
{
	Scan scan = new Scan();
	scan.addFamily(STATS_VIEWED_SOURCE);
	FilterList filters = new FilterList();

	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatSourcesKey(recommender))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatSourcesKey(recommender, from))));
	}

	setDateLimit(STATS_VIEWED_SOURCE, startDate, endDate, filters);

	scan.setFilter(filters);

	return getResults(cp, scan, STATS_VIEWED_SOURCE, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 34, Source: StatisticsTable.java


Example 8: getViewed

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getViewed(String cp, String recommender, Long from, Long startDate, Long endDate, int size)
{
	Scan scan = new Scan();
	scan.addFamily(STATS_VIEWED_ITEM);

	FilterList filters = new FilterList();

	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatViewedKey(recommender))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getStatViewedKey(
				recommender, from.longValue()))));
	}

	setDateLimit(STATS_VIEWED_ITEM, startDate, endDate, filters);

	scan.setFilter(filters);

	return getResults(cp, scan, STATS_VIEWED_ITEM, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 25, Source: StatisticsTable.java


Example 9: deleteViews

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public long deleteViews(String cp, String recommender)
{
	FilterList filters = new FilterList();

	// filter column-family & recommender name
	filters.addFilter(new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(EVENTLOG_COLUMN_USERVIEW)));
	filters
			.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getUserViewKey(recommender))));

	Scan scan = new Scan().addFamily(EVENTLOG_COLUMN_USERVIEW).setFilter(filters);

	// TODO put this in a map-reduce delete.. that executes in background..
	// we only need to pass the table & a scan object. should be quite easy

	long count = deleteAll(scan, cp, EVENTLOG_COLUMN_RECOMMENDED);

	return count;
}
 
Developer: beeldengeluid, Project: zieook, Lines: 19, Source: EventLogTable.java


Example 10: getEventLogRecommendedScanner

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * return a recommended scanner with an optional start date and end date
 * @param startDate start date
 * @param endDate end date
 * @return
 */
public Scan getEventLogRecommendedScanner(Long startDate, Long endDate)
{
	Scan scan = new Scan().addFamily(EVENTLOG_COLUMN_RECOMMENDED);
	FilterList filters = new FilterList();
	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getRecommendedItemKey())));
	// timestamp filter:
	if (startDate != null)
	{
		SingleColumnValueFilter startFilter = new SingleColumnValueFilter(EVENTLOG_COLUMN_RECOMMENDED,
				ModelConstants.TIMESTAMP, CompareOp.GREATER_OR_EQUAL, Bytes.toBytes(startDate.longValue()));
		startFilter.setFilterIfMissing(true);
		filters.addFilter(startFilter);
	}

	if (endDate != null)
	{
		SingleColumnValueFilter endFilter = new SingleColumnValueFilter(EVENTLOG_COLUMN_RECOMMENDED,
				ModelConstants.TIMESTAMP, CompareOp.LESS, Bytes.toBytes(endDate.longValue()));
		endFilter.setFilterIfMissing(true);
		filters.addFilter(endFilter);
	}

	return scan.setFilter(filters);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 31, Source: StatisticsTool.java


Example 11: getTopRecommended

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getTopRecommended(String cp, String collection, long user, Long startDate, Long endDate,
		int size)
{

	FilterList filters = new FilterList();
	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getStatRecommendedItemKey(
			collection, user))));
	setDateLimit(STATS_RECOMMENDED_USERITEM, startDate, endDate, filters);

	Scan scan = new Scan().addFamily(STATS_RECOMMENDED_USERITEM).setFilter(filters);

	return getSortedResults(cp, scan, STATS_RECOMMENDED_USERITEM, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 14, Source: StatisticsTable.java


Example 12: getRecommendedUser

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<GroupedData> getRecommendedUser(String cp, String collection, long user, Long startDate, Long endDate,
		int size)
{
	FilterList filters = new FilterList();

	filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
			.getStatRecommendedKey(collection, user))));
	setDateLimit(STATS_RECOMMENDED_USER, startDate, endDate, filters);
	Scan scan = new Scan().addFamily(STATS_RECOMMENDED_USER).setFilter(filters);
	return getResults(cp, scan, STATS_RECOMMENDED_USER, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 12, Source: StatisticsTable.java


Example 13: getCollectionSources

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * @param cp
 * @param collection
 * @param from
 * @param startDate
 * @param endDate
 * @param size
 * @return
 */
public List<GroupedData> getCollectionSources(String cp, String collection, String from, Long startDate,
		Long endDate, Integer size)
{

	Scan scan = new Scan().addFamily(STATS_COLLECTION_SOURCE);

	FilterList filters = new FilterList();

	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
			.getStatSourcesCollectionKey(collection))));

	if (from == null)
	{
		filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatSourcesCollectionKey(collection))));
	}
	else
	{
		filters.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(RowKeys
				.getStatSourcesCollectionKey(collection, from))));
	}

	setDateLimit(STATS_COLLECTION_SOURCE, startDate, endDate, filters);

	scan.setFilter(filters);

	return getResults(cp, scan, STATS_COLLECTION_SOURCE, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 38, Source: StatisticsTable.java


Example 14: getItemFilter

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
private FilterList getItemFilter(String collection, String regexp, Set<Long> items)
{

	// Filter on the given items (this is an OR filter on the rows)
	FilterList itemFilter = new FilterList(Operator.MUST_PASS_ONE);
	for (long i : items)
	{
		// row filter is probably faster, (don't know for sure) - otherwise single column-value filter is also
		// possible.
		itemFilter.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryComparator(RowKeys.getCollectionKey(collection,
				i))));

	}

	FilterList filters = new FilterList();
	// filter on collection:
	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(Bytes.toBytes(collection))));
	// filter on items:
	filters.addFilter(itemFilter);

	SingleColumnValueFilter value = new SingleColumnValueFilter(COLUMN_INTR, ModelConstants.TITLE, CompareOp.EQUAL,
			new RegexStringComparator(regexp));

	value.setFilterIfMissing(true);
	filters.addFilter(value);

	return filters;

}
 
Developer: beeldengeluid, Project: zieook, Lines: 30, Source: CollectionViewsTable.java


Example 15: getItems

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * Return collection items for the given content provider and collection, up to the given size.
 * @param cp content provider name
 * @param collection collection name
 * @param size maximum number of items to return
 * @return list of collection items
 */
public List<CollectionItem> getItems(String cp, String collection, int size)
{
	Filter filter = new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(Bytes.toBytes(collection)));
	Scan scan = new Scan().addFamily(COLUMN_INTR).setFilter(filter);
	return getItems(cp, scan, size);

}
 
Developer: beeldengeluid, Project: zieook, Lines: 14, Source: CollectionTable.java


Example 16: searchItems

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<CollectionItem> searchItems(String cp, String collection, String regexTitle, int size)
{
	FilterList filters = new FilterList();

	filters
			.addFilter(new RowFilter(CompareOp.GREATER_OR_EQUAL, new BinaryPrefixComparator(Bytes.toBytes(collection))));
	SingleColumnValueFilter value = new SingleColumnValueFilter(COLUMN_INTR, ModelConstants.TITLE, CompareOp.EQUAL,
			new RegexStringComparator(regexTitle));
	value.setFilterIfMissing(true);
	filters.addFilter(value);

	Scan scan = new Scan().addFamily(COLUMN_INTR).setFilter(filters);
	return getItems(cp, scan, size);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 15, Source: CollectionTable.java


Example 17: deleteCollection

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
/**
 * @param cp
 * @param collection
 */
public void deleteCollection(String cp, String collection)
{
	FilterList filters = new FilterList();
	filters
			.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getCollectionKey(collection))));

	Scan scan = new Scan().addFamily(COLUMN_INTR).setFilter(filters);
	deleteAll(scan, cp, COLUMN_INTR);

	scan = new Scan().addFamily(COLUMN_RAW).setFilter(filters);
	deleteAll(scan, cp, COLUMN_RAW);

}
 
Developer: beeldengeluid, Project: zieook, Lines: 18, Source: CollectionTable.java


Example 18: getContentProviders

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public List<ContentProvider> getContentProviders()
{
	List<ContentProvider> providers = new LinkedList<ContentProvider>();

	Scan scan = new Scan();
	scan.addFamily(COLUMN_CONTENTPROVIDER);
	scan.setFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(CONTENT_PRIVIDER_BYTES)));

	try
	{
		HTableInterface tableInterface = table.getTable();
		ResultScanner scanner = tableInterface.getScanner(scan);
		try
		{
			Result result = null;
			while ((result = scanner.next()) != null)
			{
				providers.add(new ContentProvider(result.getFamilyMap(COLUMN_CONTENTPROVIDER)));
			}
		}
		finally
		{
			scanner.close();
			table.putTable(tableInterface);
		}
	}
	catch (IOException e)
	{
		LOG.error("Error while getting content providers.", e);
	}

	return providers;
}
 
Developer: beeldengeluid, Project: zieook, Lines: 34, Source: ContentProviderTable.java


Example 19: deleteView

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public long deleteView(String cp, String recommender, long user, Long startDate, Long endDate)
{
	FilterList filters = new FilterList();
	filters.addFilter(new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(EVENTLOG_COLUMN_USERVIEW)));
	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys.getUserViewKey(recommender,
			user))));
	Scan scan = new Scan().addFamily(EVENTLOG_COLUMN_USERVIEW).setFilter(filters);
	return deleteAll(scan, cp, EVENTLOG_COLUMN_USERVIEW);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 10, Source: EventLogTable.java


Example 20: deleteRecommend

import org.apache.hadoop.hbase.filter.BinaryPrefixComparator; // import the required package/class
public long deleteRecommend(String cp, String recommender)
{
	FilterList filters = new FilterList();

	filters.addFilter(new FamilyFilter(CompareOp.EQUAL, new BinaryComparator(EVENTLOG_COLUMN_RECOMMENDED)));
	filters.addFilter(new RowFilter(CompareOp.EQUAL, new BinaryPrefixComparator(RowKeys
			.getRecommendedItemKey(recommender))));

	Scan scan = new Scan().addFamily(EVENTLOG_COLUMN_RECOMMENDED).setFilter(filters);
	return deleteAll(scan, cp, EVENTLOG_COLUMN_RECOMMENDED);
}
 
Developer: beeldengeluid, Project: zieook, Lines: 12, Source: EventLogTable.java



Note: the org.apache.hadoop.hbase.filter.BinaryPrefixComparator examples in this article were collected from source-code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective developers, and copyright remains with the original authors. Refer to the license of the corresponding project before redistributing or reusing the code; do not republish without permission.

