This article collects typical usage examples of the Java class org.openrdf.query.parser.sparql.SPARQLParser. If you are unsure what SPARQLParser is for or how to use it, the curated examples below should help.
The SPARQLParser class belongs to the org.openrdf.query.parser.sparql package. Twenty code examples of the class are shown below, ordered by popularity by default.
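Before the examples, here is a minimal, self-contained sketch of the parse-and-inspect pattern that most of the snippets below build on. The class name, query string, and printed output are illustrative only and are not taken from any of the projects quoted below.

import java.util.List;

import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.helpers.StatementPatternCollector;
import org.openrdf.query.parser.ParsedQuery;
import org.openrdf.query.parser.sparql.SPARQLParser;

public class SparqlParserQuickStart {
    public static void main(final String[] args) throws MalformedQueryException {
        // Parse a SPARQL string into Sesame's query algebra; the second argument is an optional base URI.
        final String sparql = "SELECT ?s ?o WHERE { ?s <uri:talksTo> ?o }";
        final SPARQLParser parser = new SPARQLParser();
        final ParsedQuery parsedQuery = parser.parseQuery(sparql, null);

        // The TupleExpr is the algebra tree that the examples below visit, optimize, or evaluate.
        final TupleExpr tupleExpr = parsedQuery.getTupleExpr();
        System.out.println(tupleExpr);

        // Collect the individual triple patterns of the query.
        final List<StatementPattern> patterns = StatementPatternCollector.process(tupleExpr);
        System.out.println(patterns.size() + " statement pattern(s) found.");
    }
}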
Example 1: run
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
public void run() throws MalformedQueryException, QueryEvaluationException, NotEnoughResultsException, SailException {
    CloseableIteration<? extends BindingSet, QueryEvaluationException> it = null;
    try {
        // Execute the query.
        final SPARQLParser sparqlParser = new SPARQLParser();
        final ParsedQuery parsedQuery = sparqlParser.parseQuery(sparql, null);
        it = sailConn.evaluate(parsedQuery.getTupleExpr(), null, null, false);
        // Perform the reads.
        if (numReads.isPresent()) {
            read(it, numReads.get());
        } else {
            readAll(it);
        }
    } finally {
        if (it != null) {
            it.close();
        }
    }
}
Developer: apache, Project: incubator-rya, Lines: 22, Source: QueryBenchmark.java
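The read and readAll helpers invoked in Example 1 are not part of the snippet above. As a rough sketch only (the method body below is an assumption, not the benchmark's actual code), draining the CloseableIteration might look like this:

private void readAll(final CloseableIteration<? extends BindingSet, QueryEvaluationException> it)
        throws QueryEvaluationException {
    // Hypothetical helper: iterate over every solution the Sail produced and count it.
    long count = 0;
    while (it.hasNext()) {
        final BindingSet bindingSet = it.next(); // one solution row; a real benchmark would inspect it here
        count++;
    }
    System.out.println("Read " + count + " binding sets.");
}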
Example 2: testStatementPatternContext
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
public void testStatementPatternContext() throws Exception {
    String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" +
            " PREFIX ub: <urn:lubm:rdfts#>\n" +
            "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n" +
            " SELECT * WHERE\n" +
            " {\n" +
            " GRAPH ub:g1 {\n" +
            "\t?x rdf:type ub:UndergraduateStudent\n" +
            " }\n" +
            " }\n" +
            "";
    QueryParser parser = new SPARQLParser();
    ParsedQuery parsedQuery = parser.parseQuery(query, null);
    // System.out.println(parsedQuery);
    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix(tablePrefix);
    visitor.setInstance(instance);
    visitor.setZk(zk);
    visitor.setUser(user);
    visitor.setPassword(password);
    visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
    // System.out.println(visitor.getPigScript());
}
Developer: apache, Project: incubator-rya, Lines: 26, Source: SparqlToPigTransformVisitorTest.java
Example 3: testJoin
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
public void testJoin() throws Exception {
    String query = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
            "}";
    // System.out.println(query);
    QueryParser parser = new SPARQLParser();
    ParsedQuery parsedQuery = parser.parseQuery(query, null);
    // System.out.println(parsedQuery);
    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix(tablePrefix);
    visitor.setInstance(instance);
    visitor.setZk(zk);
    visitor.setUser(user);
    visitor.setPassword(password);
    visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
    // System.out.println(visitor.getPigScript());
}
Developer: apache, Project: incubator-rya, Lines: 21, Source: SparqlToPigTransformVisitorTest.java
Example 4: testMutliReturnJoin
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
public void testMutliReturnJoin() throws Exception {
    String query = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> ?suborg.\n" +
            "}";
    QueryParser parser = new SPARQLParser();
    ParsedQuery parsedQuery = parser.parseQuery(query, null);
    System.out.println(query);
    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix(tablePrefix);
    visitor.setInstance(instance);
    visitor.setZk(zk);
    visitor.setUser(user);
    visitor.setPassword(password);
    visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
    System.out.println(visitor.getPigScript());
}
Developer: apache, Project: incubator-rya, Lines: 20, Source: SparqlToPigTransformVisitorTest.java
Example 5: testMutlipleJoins
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
public void testMutlipleJoins() throws Exception {
    String query = "select * where {\n" +
            "?subj <urn:lubm:rdfts#name> 'Department0'.\n" +
            "?subj <urn:lubm:rdfts#subOrganizationOf> <http://www.University0.edu>.\n" +
            "?subj <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <urn:lubm:rdfts#Department>.\n" +
            "}";
    // System.out.println(query);
    QueryParser parser = new SPARQLParser();
    ParsedQuery parsedQuery = parser.parseQuery(query, null);
    // System.out.println(parsedQuery);
    SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
    visitor.setTablePrefix(tablePrefix);
    visitor.setInstance(instance);
    visitor.setZk(zk);
    visitor.setUser(user);
    visitor.setPassword(password);
    visitor.meet(new QueryRoot(parsedQuery.getTupleExpr()));
    // System.out.println(visitor.getPigScript());
}
Developer: apache, Project: incubator-rya, Lines: 22, Source: SparqlToPigTransformVisitorTest.java
Example 6: createSingleMongoMetadataNode
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void createSingleMongoMetadataNode() throws MalformedQueryException {
    MongoDBRdfConfiguration conf = (MongoDBRdfConfiguration) getConf(true);
    Set<RyaURI> propertySet = new HashSet<>();
    propertySet.add(new RyaURI("http://createdBy"));
    conf.setStatementMetadataProperties(propertySet);
    StatementMetadataExternalSetProvider metaProvider = new StatementMetadataExternalSetProvider(conf);
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq = parser.parseQuery(query, null);
    List<QueryModelNode> patterns = new ArrayList<>();
    List<StatementMetadataNode<?>> expected = new ArrayList<>();
    Set<StatementPattern> sp = StatementMetadataTestUtils.getMetadataStatementPatterns(pq.getTupleExpr(), propertySet);
    patterns.addAll(StatementPatternCollector.process(pq.getTupleExpr()));
    JoinSegment<StatementMetadataNode<?>> segment = new JoinSegment<>(
            new HashSet<>(patterns), patterns, new HashMap<ValueExpr, Filter>());
    List<StatementMetadataNode<?>> extSets = metaProvider.getExternalSets(segment);
    expected.add(new StatementMetadataNode<>(sp, conf));
    Assert.assertEquals(expected, extSets);
}
Developer: apache, Project: incubator-rya, Lines: 26, Source: StatementMetadataExternalSetProviderTest.java
Example 7: testReOrderedBasicOptional
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testReOrderedBasicOptional() throws MalformedQueryException {
    String query = ""//
            + "SELECT ?e ?c ?l" //
            + "{" //
            + " ?e a ?c . "//
            + " OPTIONAL{?e <uri:talksTo> ?l } . "//
            + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "//
            + "}";//
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq = parser.parseQuery(query, null);
    System.out.println(pq.getTupleExpr());
    List<TupleExpr> joinArgs = getJoinArgs(pq.getTupleExpr(),
            new ArrayList<TupleExpr>());
    // System.out.println(joinArgs);
    FlattenedOptional optional = (FlattenedOptional) joinArgs.get(0);
    TupleExpr sp1 = joinArgs.get(1);
    TupleExpr sp2 = joinArgs.get(2);
    Assert.assertEquals(false, optional.canRemoveTuple(sp1));
    Assert.assertEquals(false, optional.canAddTuple(sp2));
}
Developer: apache, Project: incubator-rya, Lines: 27, Source: FlattenedOptionalTest.java
Example 8: testPeriodicNodePlacement
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testPeriodicNodePlacement() throws MalformedQueryException {
    String query = "prefix function: <http://org.apache.rya/function#> " //n
            + "prefix time: <http://www.w3.org/2006/time#> " //n
            + "prefix fn: <http://www.w3.org/2006/fn#> " //n
            + "select ?obs ?time ?lat where {" //n
            + "Filter(function:periodic(?time, 12.0, 6.0,time:hours)) " //n
            + "Filter(fn:test(?lat, 25)) " //n
            + "?obs <uri:hasTime> ?time. " //n
            + "?obs <uri:hasLattitude> ?lat }"; //n
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq = parser.parseQuery(query, null);
    TupleExpr te = pq.getTupleExpr();
    te.visit(new PeriodicQueryNodeVisitor());
    PeriodicNodeCollector collector = new PeriodicNodeCollector();
    te.visit(collector);
    PeriodicQueryNode node2 = new PeriodicQueryNode(12*60*60*1000L, 6*3600*1000L, TimeUnit.MILLISECONDS, "time", new Join());
    Assert.assertEquals(true, periodicNodesEqualIgnoreArg(node2, collector.getPeriodicQueryNode()));
}
Developer: apache, Project: incubator-rya, Lines: 25, Source: PeriodicQueryUtilTest.java
Example 9: testConstructGraph
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testConstructGraph() throws MalformedQueryException, UnsupportedEncodingException {
    String query = "select ?x where { ?x <uri:talksTo> <uri:Bob>. ?y <uri:worksAt> ?z }";
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq = parser.parseQuery(query, null);
    List<StatementPattern> patterns = StatementPatternCollector.process(pq.getTupleExpr());
    ConstructGraph graph = new ConstructGraph(patterns);
    QueryBindingSet bs = new QueryBindingSet();
    bs.addBinding("x", vf.createURI("uri:Joe"));
    bs.addBinding("y", vf.createURI("uri:Bob"));
    bs.addBinding("z", vf.createURI("uri:BurgerShack"));
    VisibilityBindingSet vBs = new VisibilityBindingSet(bs, "FOUO");
    Set<RyaStatement> statements = graph.createGraphFromBindingSet(vBs);
    RyaStatement statement1 = new RyaStatement(new RyaURI("uri:Joe"), new RyaURI("uri:talksTo"), new RyaURI("uri:Bob"));
    RyaStatement statement2 = new RyaStatement(new RyaURI("uri:Bob"), new RyaURI("uri:worksAt"), new RyaURI("uri:BurgerShack"));
    Set<RyaStatement> expected = Sets.newHashSet(Arrays.asList(statement1, statement2));
    expected.forEach(x -> x.setColumnVisibility("FOUO".getBytes()));
    ConstructGraphTestUtils.ryaStatementSetsEqualIgnoresTimestamp(expected, statements);
}
Developer: apache, Project: incubator-rya, Lines: 23, Source: ConstructGraphTest.java
Example 10: makeUnchainedQuery
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
private static TupleExpr makeUnchainedQuery(final BenchmarkParams params) throws MalformedQueryException {
    final Queue<String> varQueue = Lists.newLinkedList(variables);
    final SPARQLParser parser = new SPARQLParser();
    final List<String> queryVars = new ArrayList<>();
    // The first statement pattern has two variables.
    queryVars.add(varQueue.remove());
    queryVars.add(varQueue.remove());
    // Each additional statement pattern is not chained to the previous one, so it also needs two new variables.
    for (int i = 1; i < params.getQuerySPCount(); i++) {
        queryVars.add(varQueue.remove());
        queryVars.add(varQueue.remove());
    }
    final String sparql = buildUnchainedSPARQL(queryVars);
    return parser.parseQuery(sparql, null).getTupleExpr();
}
Developer: apache, Project: incubator-rya, Lines: 20, Source: PCJOptimizerBenchmark.java
Example 11: AccumuloIndexSet
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
/**
 *
 * @param accCon
 *            - connection to a valid Accumulo instance
 * @param tablename
 *            - name of an existing PCJ table
 * @throws MalformedQueryException
 * @throws SailException
 * @throws QueryEvaluationException
 * @throws TableNotFoundException
 * @throws AccumuloSecurityException
 * @throws AccumuloException
 * @throws PCJStorageException
 */
public AccumuloIndexSet(final Configuration conf, final String tablename)
        throws MalformedQueryException, SailException,
        QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException, PCJStorageException {
    this.tablename = tablename;
    this.accCon = ConfigUtils.getConnector(conf);
    this.auths = getAuthorizations(conf);
    PcjMetadata meta = pcj.getPcjMetadata(accCon, tablename);
    final SPARQLParser sp = new SPARQLParser();
    final ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(meta.getSparql(), null);
    setProjectionExpr((Projection) pq.getTupleExpr());
    final Set<VariableOrder> orders = meta.getVarOrders();
    varOrder = Lists.newArrayList();
    for (final VariableOrder var : orders) {
        varOrder.add(var.toString());
    }
    setLocalityGroups(tablename, accCon, varOrder);
    this.setSupportedVariableOrderMap(varOrder);
}
Developer: apache, Project: incubator-rya, Lines: 35, Source: AccumuloIndexSet.java
Example 12: MongoPcjQueryNode
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
/**
 * Creates a new {@link MongoPcjQueryNode}.
 *
 * @param sparql - sparql query whose results will be stored in PCJ document. (not empty or null)
 * @param pcjId - name of an existing PCJ. (not empty or null)
 * @param pcjDocs - {@link MongoPcjDocuments} used to maintain PCJs in mongo. (not null)
 *
 * @throws MalformedQueryException - The SPARQL query needs to contain a projection.
 */
public MongoPcjQueryNode(final String sparql, final String pcjId, final MongoPcjDocuments pcjDocs) throws MalformedQueryException {
    checkArgument(!Strings.isNullOrEmpty(sparql));
    checkArgument(!Strings.isNullOrEmpty(pcjId));
    this.pcjDocs = checkNotNull(pcjDocs);
    this.pcjId = pcjId;
    final SPARQLParser sp = new SPARQLParser();
    final ParsedTupleQuery pq = (ParsedTupleQuery) sp.parseQuery(sparql, null);
    final TupleExpr te = pq.getTupleExpr();
    Preconditions.checkArgument(PCJOptimizerUtilities.isPCJValid(te), "TupleExpr is an invalid PCJ.");
    final Optional<Projection> projection = new ParsedQueryUtil().findProjection(pq);
    if (!projection.isPresent()) {
        throw new MalformedQueryException("SPARQL query '" + sparql + "' does not contain a Projection.");
    }
    setProjectionExpr(projection.get());
}
Developer: apache, Project: incubator-rya, Lines: 26, Source: MongoPcjQueryNode.java
Example 13: tupleReArrangeTest3
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void tupleReArrangeTest3() throws MalformedQueryException {
    String queryString = ""//
            + "SELECT ?a ?b ?c ?d ?e ?x ?y" //
            + "{" //
            + " Filter(?c = <uri:label2>)" //
            + " Filter(?x = <uri:somethingFunny>) "//
            + " ?e <uri:laughsAt> ?x ." //
            + " ?e <uri:livesIn> ?y . "//
            + "{ ?a a ?b . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?c }"//
            + " UNION { ?a <uri:talksTo> ?d . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?e }"//
            + "}";//
    SPARQLParser sp = new SPARQLParser();
    ParsedQuery pq = sp.parseQuery(queryString, null);
    List<TupleExpr> tuples = TupleReArranger.getTupleReOrderings(pq.getTupleExpr());
    Assert.assertEquals(24, tuples.size());
}
Developer: apache, Project: incubator-rya, Lines: 22, Source: TupleReArrangerTest.java
Example 14: tupleReArrangeTest4
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void tupleReArrangeTest4() throws MalformedQueryException {
    String queryString = ""//
            + "SELECT ?a ?b ?c ?d ?e ?x ?y" //
            + "{" //
            + " Filter(?c = <uri:label2>)" //
            + " Filter(?x = <uri:somethingFunny>) "//
            + " Filter(?d = <uri:Fred> ) " //
            + " ?e <uri:laughsAt> ?x ." //
            + " ?e <uri:livesIn> ?y . "//
            + "{ ?a a ?b . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?c }"//
            + " UNION { ?a <uri:talksTo> ?d . ?a <http://www.w3.org/2000/01/rdf-schema#label> ?e }"//
            + "}";//
    SPARQLParser sp = new SPARQLParser();
    ParsedQuery pq = sp.parseQuery(queryString, null);
    TupleExpr te = pq.getTupleExpr();
    new FilterOptimizer().optimize(te, null, null);
    System.out.println(te);
    List<TupleExpr> tuples = TupleReArranger.getTupleReOrderings(te);
    System.out.println(tuples);
    Assert.assertEquals(24, tuples.size());
}
Developer: apache, Project: incubator-rya, Lines: 27, Source: TupleReArrangerTest.java
Example 15: testCost1
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testCost1() throws Exception {
    String q1 = ""//
            + "SELECT ?f ?m ?d ?h ?i " //
            + "{" //
            + " ?f a ?m ."//
            + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + " ?d <uri:talksTo> ?f . "//
            + " ?d <uri:hangOutWith> ?f ." //
            + " ?f <uri:hangOutWith> ?h ." //
            + " ?f <uri:associatesWith> ?i ." //
            + " ?i <uri:associatesWith> ?h ." //
            + "}";//
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    ThreshholdPlanSelector tps = new ThreshholdPlanSelector(
            pq1.getTupleExpr());
    double cost = tps.getCost(pq1.getTupleExpr(), .6, .4, 0);
    Assert.assertEquals(.7, cost, .01);
}
Developer: apache, Project: incubator-rya, Lines: 26, Source: ThreshholdPlanSelectorTest.java
Example 16: testCost2
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testCost2() throws Exception {
    String q1 = ""//
            + "SELECT ?f ?m ?d ?e ?l ?c " //
            + "{" //
            + " ?f a ?m ."//
            + " ?e a ?l ."//
            + " ?d <uri:talksTo> ?f . "//
            + " ?c <uri:talksTo> ?e . "//
            + " ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + " ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
            + "}";//
    SPARQLParser parser = new SPARQLParser();
    ParsedQuery pq1 = parser.parseQuery(q1, null);
    ThreshholdPlanSelector tps = new ThreshholdPlanSelector(
            pq1.getTupleExpr());
    double cost = tps.getCost(pq1.getTupleExpr(), .4, .3, .3);
    Assert.assertEquals(.58, cost, .000000001);
}
Developer: apache, Project: incubator-rya, Lines: 25, Source: ThreshholdPlanSelectorTest.java
Example 17: testConstantGraph
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testConstantGraph() throws Exception {
    SPARQLParser parser1 = new SPARQLParser();
    SPARQLParser parser2 = new SPARQLParser();
    ParsedQuery pq1 = parser1.parseQuery(q20, null);
    ParsedQuery pq2 = parser2.parseQuery(q21, null);
    List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
            pq2.getTupleExpr());
    Assert.assertTrue(normalize.size() == 1);
    for (TupleExpr s : normalize) {
        Assert.assertTrue(tupleEquals(s, pq1.getTupleExpr()));
    }
}
Developer: apache, Project: incubator-rya, Lines: 19, Source: QueryVariableNormalizerTest.java
Example 18: testSameTuples
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
/**
 * @throws Exception
 *             Tests QueryVariableNormalizerContext with same query passed
 *             in as query and index. Tests that only one index is produced
 *             and that it equals original query.
 */
@Test
public void testSameTuples() throws Exception {
    SPARQLParser parser1 = new SPARQLParser();
    SPARQLParser parser2 = new SPARQLParser();
    ParsedQuery pq1 = parser1.parseQuery(q11, null);
    ParsedQuery pq2 = parser2.parseQuery(q11, null);
    List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
            pq2.getTupleExpr());
    Assert.assertTrue(normalize.size() == 1);
    Assert.assertTrue(normalize.get(0).equals(pq1.getTupleExpr()) && normalize.get(0).equals(pq2.getTupleExpr()));
}
Developer: apache, Project: incubator-rya, Lines: 23, Source: QueryVariableNormalizerTest.java
Example 19: testNoConstants
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
/**
 * @throws Exception
 *             Tests QueryVariableNormalizerContext with two queries whose
 *             StatementPattern nodes contain no constant Vars.
 */
@Test
public void testNoConstants() throws Exception {
    SPARQLParser parser1 = new SPARQLParser();
    SPARQLParser parser2 = new SPARQLParser();
    ParsedQuery pq1 = parser1.parseQuery(q15, null);
    ParsedQuery pq2 = parser2.parseQuery(q16, null);
    List<TupleExpr> normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
            pq2.getTupleExpr());
    Assert.assertEquals(2, normalize.size());
    for (TupleExpr s : normalize) {
        Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s));
    }
    pq1 = parser1.parseQuery(q16, null);
    pq2 = parser2.parseQuery(q17, null);
    normalize = QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), pq2.getTupleExpr());
    Assert.assertTrue(normalize.size() == 0);
}
Developer: apache, Project: incubator-rya, Lines: 30, Source: VarConstQueryVariableNormalizerTest.java
Example 20: testGeoIndexFunction
import org.openrdf.query.parser.sparql.SPARQLParser; // import the required package/class
@Test
public void testGeoIndexFunction() throws Exception {
    final SPARQLParser parser1 = new SPARQLParser();
    final SPARQLParser parser2 = new SPARQLParser();
    final ParsedQuery pq1 = parser1.parseQuery(q21, null);
    final ParsedQuery pq2 = parser2.parseQuery(q23, null);
    System.out.println("Query is " + pq1.getTupleExpr());
    System.out.println("Index is " + pq2.getTupleExpr());
    final SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
    final List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
    list.add(extTup);
    final TupleExpr tup = pq1.getTupleExpr().clone();
    final PCJOptimizer pcj = new PCJOptimizer(list, false, new AccumuloIndexSetProvider(new Configuration(), list));
    pcj.optimize(tup, null, null);
    System.out.println("Processed query is " + tup);
    final Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));
    final Set<QueryModelNode> eTupSet = PcjIntegrationTestingUtil.getTupleSets(tup);
    final Set<StatementPattern> set = Sets.newHashSet();
    Assert.assertEquals(1, eTupSet.size());
    for (final QueryModelNode s : eTupSet) {
        final Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                .getTupleExpr()));
        set.addAll(tempSet);
    }
    Assert.assertTrue(qSet.containsAll(set));
}
Developer: apache, Project: incubator-rya, Lines: 38, Source: PrecompJoinOptimizerVarToConstTest.java
Note: the org.openrdf.query.parser.sparql.SPARQLParser examples in this article were collected from GitHub, MSDocs, and other source-code and documentation platforms. The snippets come from open-source projects contributed by their original developers; copyright remains with the original authors, and any redistribution or reuse should follow the corresponding project's license. Do not reproduce without permission.