本文整理汇总了Java中org.eclipse.rdf4j.query.UpdateExecutionException类的典型用法代码示例。如果您正苦于以下问题:Java UpdateExecutionException类的具体用法?Java UpdateExecutionException怎么用?Java UpdateExecutionException使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
UpdateExecutionException类属于org.eclipse.rdf4j.query包,在下文中一共展示了UpdateExecutionException类的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Java代码示例。
示例1: checkGraph
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Evaluates the configured SPARQL ASK query over the given graph entries and
 * reports the outcome: a short info message when the ASK holds, a failure
 * report otherwise.
 *
 * @param connection repository connection used to evaluate the query
 * @param entries graphs the ASK query is evaluated against
 * @throws DPUException when the query cannot be prepared or executed
 */
protected void checkGraph(RepositoryConnection connection, final List<RDFDataUnit.Entry> entries) throws DPUException {
    final SparqlUtils.SparqlAskObject askQuery;
    try {
        askQuery = SparqlUtils.createAsk(config.getAskQuery(), entries);
        SparqlUtils.execute(connection, askQuery);
    } catch (RepositoryException | MalformedQueryException | UpdateExecutionException | QueryEvaluationException | SparqlProblemException | DataUnitException e) {
        throw new DPUException(e.getLocalizedMessage(), e);
    }
    if (askQuery.result) {
        // Validation passed.
        ContextUtils.sendShortInfo(ctx, "rdfvalidation.finished.ok");
    } else {
        reportFailure();
    }
}
开发者ID:UnifiedViews,项目名称:Plugins,代码行数:20,代码来源:SparqlAsk.java
示例2: testMultipleReposWithDifferentUsers
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
@Test
public void testMultipleReposWithDifferentUsers() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    // A reader-credentialed connection must not be able to create a graph.
    readerRep.initialize();
    MarkLogicRepositoryConnection readerConnection = readerRep.getConnection();
    exception.expect(Exception.class);
    readerConnection.prepareUpdate("CREATE GRAPH <abc>").execute();
    // NOTE(review): with the ExpectedException rule the statements below only
    // run if the call above unexpectedly succeeds - presumably intentional.
    writerRep.initialize();
    MarkLogicRepositoryConnection writerConnection = writerRep.getConnection();
    writerConnection.prepareUpdate("CREATE GRAPH <abcdef10>").execute();
    writerRep.shutDown();
    readerRep.shutDown();
}
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:14,代码来源:MarkLogicRepositoryTest.java
示例3: updateWithWrongPerms
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
@Test
public void updateWithWrongPerms() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    // A connection opened with read-only credentials must fail on SPARQL Update.
    readerRep.initialize();
    MarkLogicRepositoryConnection readOnlyConnection = readerRep.getConnection();
    exception.expect(UpdateExecutionException.class);
    readOnlyConnection.prepareUpdate("CREATE GRAPH <abc>").execute();
}
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:8,代码来源:MarkLogicExceptionsTest.java
示例4: execUpdate
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Executes the SPARQL update held in {@code super.query} against the
 * role-specific repository, storing the opened connection and prepared
 * update in the corresponding fields.
 *
 * @throws OData2SparqlException when obtaining the connection, parsing the
 *         query, or executing the update fails; the cause is preserved
 */
public void execUpdate() throws OData2SparqlException {
    try {
        super.connection = rdfRoleRepository.getRepository().getConnection();
        log.info(super.query);
        updateQuery = connection.prepareUpdate(QueryLanguage.SPARQL, super.query);
        updateQuery.execute();
    } catch (RepositoryException | MalformedQueryException | UpdateExecutionException e) {
        // Log the throwable as well, not just the message, so the stack trace
        // is not lost before the exception is wrapped and rethrown.
        log.error(e.getMessage(), e);
        throw new OData2SparqlException("RdfUpdate execUpdate failure", e);
    }
}
开发者ID:peterjohnlawrence,项目名称:com.inova8.odata2sparql.v4,代码行数:13,代码来源:RdfUpdate.java
示例5: queryUpdate
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Executes a SPARQL Update statement on the shared connection.
 *
 * <p>Parse and execution failures are wrapped so callers only have to deal
 * with a single exception type.
 *
 * @param sparql SPARQL Update text to execute
 * @throws RepositoryException when the update cannot be parsed or executed
 */
public void queryUpdate(String sparql) throws RepositoryException {
    try {
        conn.prepareUpdate(QueryLanguage.SPARQL, sparql).execute();
    } catch (MalformedQueryException | UpdateExecutionException ex) {
        throw new RepositoryException(ex);
    }
}
开发者ID:Fedict,项目名称:dcattools,代码行数:15,代码来源:Storage.java
示例6: executeUpdateQuery
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Executes the given SPARQL update query over the given graphs.
 *
 * <p>When {@code useDataset()} is false the query text itself is rewritten:
 * a WITH clause (target graph) is injected before the first DELETE keyword,
 * or before INSERT if no DELETE is present, and a USING clause (source
 * graphs) is injected before WHERE. When {@code useDataset()} is true the
 * graphs are supplied through a {@code DatasetImpl} on the prepared update
 * instead.
 *
 * @param query SPARQL update query to execute
 * @param sourceEntries graphs the update reads from (USING graphs)
 * @param targetGraph graph the update writes to (WITH graph)
 * @param connection connection on which the update is executed
 * @throws eu.unifiedviews.dpu.DPUException when preparation or execution fails
 * @throws eu.unifiedviews.dataunit.DataUnitException on data unit access failure
 */
protected void executeUpdateQuery(String query, final List<RDFDataUnit.Entry> sourceEntries,
IRI targetGraph,
RepositoryConnection connection) throws DPUException, DataUnitException {
    // Prepare query: rewrite the query text when no dataset is used.
    if (!useDataset()) {
        // NOTE(review): String.replaceFirst treats '$' and '\' specially in the
        // replacement string; assumes prepareWithClause/prepareUsingClause never
        // emit those characters - confirm.
        if (Pattern.compile(Pattern.quote("DELETE"), Pattern.CASE_INSENSITIVE).matcher(query).find()) {
            query = query.replaceFirst("(?i)DELETE", prepareWithClause(targetGraph) + " DELETE");
        } else {
            query = query.replaceFirst("(?i)INSERT", prepareWithClause(targetGraph) + " INSERT");
        }
        // Inject the USING clause(s) right before the WHERE keyword.
        query = query.replaceFirst("(?i)WHERE", prepareUsingClause(sourceEntries) + "WHERE");
    }
    LOG.debug("Query to execute: {}", query);
    try {
        // Execute query.
        final Update update = connection.prepareUpdate(QueryLanguage.SPARQL, query);
        if (useDataset()) {
            // Supply source/target graphs via a dataset instead of rewriting the text.
            final DatasetImpl dataset = new DatasetImpl();
            for (RDFDataUnit.Entry entry : sourceEntries) {
                dataset.addDefaultGraph(entry.getDataGraphURI());
            }
            dataset.addDefaultRemoveGraph(targetGraph);
            dataset.setDefaultInsertGraph(targetGraph);
            update.setDataset(dataset);
        }
        update.execute();
    } catch (RepositoryException | MalformedQueryException | UpdateExecutionException ex) {
        throw ContextUtils.dpuException(ctx, ex, "SparqlConstruct.execute.exception.updateExecute");
    }
}
开发者ID:UnifiedViews,项目名称:Plugins,代码行数:43,代码来源:SparqlConstruct.java
示例7: copyMetadata
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Copies all metadata triples for the given symbolic name from the source
 * data unit into the destination's metadata write graph.
 *
 * <p>Both data units run on the same storage, so a single lazily opened
 * source connection is used for the whole copy.
 *
 * @param symbolicName symbolic name whose metadata triples are copied
 * @throws DataUnitException when the underlying update fails
 */
@Override
public void copyMetadata(String symbolicName) throws DataUnitException {
    try {
        if (connection == null) {
            connection = source.getConnection();
        }
        // Select all triples <bnode> symbolicName "symbolicName"
        // and add all of them to the destination data unit
        // (we use the source connection - both run on the same storage).
        final Update update = connection.prepareUpdate(
                QueryLanguage.SPARQL, UPDATE);
        update.setBinding(SYMBOLIC_NAME_BINDING,
                connection.getValueFactory().createLiteral(symbolicName));
        final SimpleDataset dataset = new SimpleDataset();
        for (IRI item : source.getMetadataGraphnames()) {
            dataset.addDefaultGraph(item);
        }
        dataset.setDefaultInsertGraph(
                destination.getMetadataWriteGraphname());
        update.setDataset(dataset);
        update.execute();
    } catch (RepositoryException | UpdateExecutionException | MalformedQueryException ex) {
        // Include the symbolic name so failures can be traced; the original
        // wrapped the cause with an empty message, losing all context.
        throw new DataUnitException("Failed to copy metadata for: " + symbolicName, ex);
    }
}
开发者ID:UnifiedViews,项目名称:Plugin-DevEnv,代码行数:30,代码来源:CopyHelpers.java
示例8: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Flushes the cache to the server by serializing all cached statements into a
 * single SPARQL {@code INSERT DATA} update, grouped per named graph, then
 * clears the cache.
 *
 * <p>Blank-node subjects, predicates, and objects are skolemized under the
 * {@code http://marklogic.com/semantics/blank/} namespace.
 *
 * @throws MarkLogicRdf4jException
 */
protected synchronized void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException {
    // Nothing cached - nothing to send.
    if (cache.isEmpty()) { return; }
    StringBuffer entireQuery = new StringBuffer();
    SPARQLQueryBindingSet bindingSet = new SPARQLQueryBindingSet();
    // Emit prefix declarations for all namespaces known to the cache.
    // NOTE(review): the trailing ">. " is Turtle-style; SPARQL 1.1 PREFIX
    // declarations take no dot - presumably MarkLogic tolerates it; confirm.
    for (Namespace ns :cache.getNamespaces()){
        entireQuery.append("PREFIX "+ns.getPrefix()+": <"+ns.getName()+">. ");
    }
    entireQuery.append("INSERT DATA { ");
    // Collect the distinct contexts so each graph is opened exactly once.
    Set<Resource> distinctCtx = new HashSet<Resource>();
    for (Resource context :cache.contexts()) {
        distinctCtx.add(context);
    }
    for (Resource ctx : distinctCtx) {
        // A null context means the default graph: no GRAPH wrapper.
        if (ctx != null) {
            entireQuery.append(" GRAPH <" + ctx + "> { ");
        }
        for (Statement stmt : cache.filter(null, null, null, ctx)) {
            // Subject: skolemize blank nodes, otherwise emit the IRI as-is.
            if (stmt.getSubject() instanceof org.eclipse.rdf4j.model.BNode) {
                entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getSubject().stringValue() + "> ");
            }else {
                entireQuery.append("<" + stmt.getSubject().stringValue() + "> ");
            }
            // Predicate: same skolemization rule.
            if (stmt.getPredicate() instanceof org.eclipse.rdf4j.model.BNode) {
                entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getPredicate().stringValue() + "> ");
            }else{
                entireQuery.append("<" + stmt.getPredicate().stringValue() + "> ");
            }
            Value object=stmt.getObject();
            if (object instanceof Literal) {
                // Literal object: quoted, SPARQL-escaped label, then either a
                // datatype (no language tag present) or a language tag.
                Literal lit = (Literal) object;
                entireQuery.append("\"");
                entireQuery.append(SPARQLUtil.encodeString(lit.getLabel()));
                entireQuery.append("\"");
                if(null == lit.getLanguage().orElse(null)) {
                    entireQuery.append("^^<" + lit.getDatatype().stringValue() + ">");
                }else{
                    entireQuery.append("@" + lit.getLanguage().orElse(null));
                }
            } else {
                // Resource object: skolemize blank nodes, otherwise plain IRI.
                if (stmt.getObject() instanceof org.eclipse.rdf4j.model.BNode) {
                    entireQuery.append("<http://marklogic.com/semantics/blank/" + stmt.getObject().stringValue() + "> ");
                }else {
                    entireQuery.append("<" + object.stringValue() + "> ");
                }
            }
            entireQuery.append(".");
        }
        if (ctx != null) {
            entireQuery.append(" }");
        }
    }
    entireQuery.append("} ");
    log.debug(entireQuery.toString());
    client.sendUpdateQuery(entireQuery.toString(),bindingSet,false,null);
    // Record the flush time and empty the cache only after a successful send.
    lastCacheAccess = new Date();
    log.debug("success writing cache: {}",String.valueOf(cache.size()));
    cache.clear();
}
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:70,代码来源:TripleWriteCache.java
示例9: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Flushes the cache to the server by serializing all cached statements into a
 * single SPARQL {@code DELETE DATA} update, grouped per named graph, then
 * clears the cache.
 *
 * <p>NOTE(review): unlike the write-cache counterpart, blank nodes are not
 * skolemized here - confirm that deletes never see blank nodes.
 *
 * @throws MarkLogicRdf4jException
 */
protected synchronized void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException {
    // Nothing cached - nothing to send.
    if (cache.isEmpty()) { return; }
    // StringBuilder instead of StringBuffer: the builder is method-local and
    // the whole method is already synchronized, so no extra locking is needed.
    StringBuilder entireQuery = new StringBuilder();
    SPARQLQueryBindingSet bindingSet = new SPARQLQueryBindingSet();
    // Emit prefix declarations for all namespaces known to the cache.
    for (Namespace ns : cache.getNamespaces()) {
        entireQuery.append("PREFIX "+ns.getPrefix()+": <"+ns.getName()+">. ");
    }
    entireQuery.append("DELETE DATA { ");
    // Collect the distinct contexts so each graph is opened exactly once.
    Set<Resource> distinctCtx = new HashSet<Resource>();
    for (Resource context : cache.contexts()) {
        distinctCtx.add(context);
    }
    for (Resource ctx : distinctCtx) {
        // A null context means the default graph: no GRAPH wrapper.
        if (ctx != null) {
            entireQuery.append(" GRAPH <" + ctx + "> { ");
        }
        for (Statement stmt : cache.filter(null, null, null, ctx)) {
            entireQuery.append("<" + stmt.getSubject().stringValue() + "> ");
            entireQuery.append("<" + stmt.getPredicate().stringValue() + "> ");
            Value object = stmt.getObject();
            if (object instanceof Literal) {
                // Literal object: quoted, SPARQL-escaped label, then either a
                // datatype (no language tag present) or a language tag.
                Literal lit = (Literal) object;
                entireQuery.append("\"");
                entireQuery.append(SPARQLUtil.encodeString(lit.getLabel()));
                entireQuery.append("\"");
                if (null == lit.getLanguage().orElse(null)) {
                    entireQuery.append("^^<" + lit.getDatatype().stringValue() + ">");
                } else {
                    // BUG FIX: getLanguage() returns Optional<String>; calling
                    // toString() emitted "@Optional[en]" into the query. Use
                    // get() - safe here, the branch guarantees a value is present.
                    entireQuery.append("@" + lit.getLanguage().get());
                }
            } else {
                entireQuery.append("<" + object.stringValue() + "> ");
            }
            entireQuery.append(".");
        }
        if (ctx != null) {
            entireQuery.append(" }");
        }
    }
    entireQuery.append("} ");
    log.info(entireQuery.toString());
    client.sendUpdateQuery(entireQuery.toString(),bindingSet,false,null);
    // Record the flush time and empty the cache only after a successful send.
    lastCacheAccess = new Date();
    //log.info("success writing cache: {}",String.valueOf(cache.size()));
    cache.clear();
}
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:58,代码来源:TripleDeleteCache.java
示例10: TestMalformedUpdateQUery
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
@Test
public void TestMalformedUpdateQUery() throws RepositoryException, MalformedQueryException, UpdateExecutionException {
    // Executing syntactically invalid SPARQL must surface as an UpdateExecutionException.
    // NOTE(review): method name typos ("Test" prefix, "QUery") kept - renaming
    // a public method is an interface change.
    exception.expect(UpdateExecutionException.class);
    conn.prepareUpdate("A malformed query").execute();
}
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:6,代码来源:MarkLogicExceptionsTest.java
示例11: execute
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Rejects every SPARQL update: this repository is read only.
 *
 * @throws UpdateExecutionException always
 */
@Override
protected void execute(
        UpdateExpr updateExpr, Dataset dataset, BindingSet bindings, boolean includeInferred, int maxExecutionTime
) throws UpdateExecutionException {
    throw new UpdateExecutionException("This repository is read only");
}
示例12: flush
import org.eclipse.rdf4j.query.UpdateExecutionException; //导入依赖的package包/类
/**
 * Writes any cached triples to the server and clears the cache.
 *
 * @throws RepositoryException on repository access failure
 * @throws MalformedQueryException when the generated update cannot be parsed
 * @throws UpdateExecutionException when the update fails to execute
 * @throws IOException on communication failure with the server
 */
protected abstract void flush() throws RepositoryException, MalformedQueryException, UpdateExecutionException, IOException;
开发者ID:marklogic,项目名称:marklogic-rdf4j,代码行数:2,代码来源:TripleCache.java
注:本文中的org.eclipse.rdf4j.query.UpdateExecutionException类示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论