<<<<<<<
import com.google.common.collect.ImmutableList;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.*;
=======
import it.unibz.inf.ontop.model.BNode;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAException;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.ObjectConstant;
import it.unibz.inf.ontop.model.Predicate;
>>>>>>>
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.model.BNode;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAException;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.ObjectConstant;
import it.unibz.inf.ontop.model.Predicate;
<<<<<<<
import it.unibz.inf.ontop.ontology.OClass;
import it.unibz.inf.ontop.ontology.OntologyFactory;
import it.unibz.inf.ontop.ontology.impl.OntologyFactoryImpl;
import it.unibz.inf.ontop.ontology.impl.OntologyVocabularyImpl;
=======
import it.unibz.inf.ontop.ontology.OClass;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.Equivalences;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.EquivalencesDAG;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.Interval;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.SemanticIndexCache;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.SemanticIndexRange;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.TBoxReasoner;
>>>>>>>
import it.unibz.inf.ontop.ontology.OClass;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.Equivalences;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.EquivalencesDAG;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.Interval;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.SemanticIndexCache;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.SemanticIndexRange;
import it.unibz.inf.ontop.owlrefplatform.core.dagjgrapht.TBoxReasoner;
<<<<<<<
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
=======
import java.util.*;
>>>>>>>
import java.util.*;
<<<<<<<
public RDBMSSIRepositoryManager(TBoxReasoner reasonerDag, NativeQueryLanguageComponentFactory nativeQLFactory) {
=======
public RDBMSSIRepositoryManager(TBoxReasoner reasonerDag, ImmutableOntologyVocabulary voc) {
>>>>>>>
public RDBMSSIRepositoryManager(TBoxReasoner reasonerDag, ImmutableOntologyVocabulary voc,
NativeQueryLanguageComponentFactory nativeQLFactory) {
<<<<<<<
this.nativeQLFactory = nativeQLFactory;
=======
this.voc = voc;
>>>>>>>
this.voc = voc;
this.nativeQLFactory = nativeQLFactory;
<<<<<<<
CQIE targetQuery = constructTargetQuery(ope.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
=======
List<Function> targetQuery = constructTargetQuery(ope.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = dfac.getRDBMSMappingAxiom(dfac.getSQLQuery(sourceQuery), targetQuery);
>>>>>>>
List<Function> targetQuery = constructTargetQuery(ope.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
<<<<<<<
CQIE targetQuery = constructTargetQuery(dpe.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
=======
List<Function> targetQuery = constructTargetQuery(dpe.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = dfac.getRDBMSMappingAxiom(dfac.getSQLQuery(sourceQuery), targetQuery);
>>>>>>>
List<Function> targetQuery = constructTargetQuery(dpe.getPredicate(), view.getId().getType1(), view.getId().getType2());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
<<<<<<<
CQIE targetQuery = constructTargetQuery(classNode.getPredicate(), view.getId().getType1());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
=======
List<Function> targetQuery = constructTargetQuery(classNode.getPredicate(), view.getId().getType1());
OBDAMappingAxiom basicmapping = dfac.getRDBMSMappingAxiom(dfac.getSQLQuery(sourceQuery), targetQuery);
>>>>>>>
List<Function> targetQuery = constructTargetQuery(classNode.getPredicate(), view.getId().getType1());
OBDAMappingAxiom basicmapping = nativeQLFactory.create(dfac.getSQLQuery(sourceQuery), targetQuery);
<<<<<<<
import java.net.URI;
import java.util.LinkedList;
import java.util.List;
import javax.swing.AbstractListModel;
<<<<<<< HEAD:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/gui/treemodels/SynchronizedMappingListModel.java
import it.unibz.inf.ontop.protege.core.OBDAMappingListener;
import it.unibz.inf.ontop.protege.core.OBDAModelWrapper;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
=======
=======
>>>>>>>
<<<<<<<
@Nullable
private Boolean isDistinct;
=======
@Nullable
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> variableDefinition;
>>>>>>>
@Nullable
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> variableDefinition;
@Nullable
private Boolean isDistinct;
<<<<<<<
public IQTree removeDistincts() {
IQProperties properties = getProperties();
return properties.areDistinctAlreadyRemoved()
? this
: getRootNode().removeDistincts(getChildren(), properties);
}
@Override
=======
public IQTree replaceSubTree(IQTree subTreeToReplace, IQTree newSubTree) {
if (equals(subTreeToReplace))
return newSubTree;
ImmutableList<IQTree> newChildren = getChildren().stream()
.map(c -> c.replaceSubTree(subTreeToReplace, newSubTree))
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(getRootNode(), newChildren);
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions() {
if (variableDefinition == null)
variableDefinition = getRootNode().getPossibleVariableDefinitions(getChildren());
return variableDefinition;
}
@Override
>>>>>>>
public IQTree replaceSubTree(IQTree subTreeToReplace, IQTree newSubTree) {
if (equals(subTreeToReplace))
return newSubTree;
ImmutableList<IQTree> newChildren = getChildren().stream()
.map(c -> c.replaceSubTree(subTreeToReplace, newSubTree))
.collect(ImmutableCollectors.toList());
return iqFactory.createNaryIQTree(getRootNode(), newChildren);
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions() {
if (variableDefinition == null)
variableDefinition = getRootNode().getPossibleVariableDefinitions(getChildren());
return variableDefinition;
}
@Override
public IQTree removeDistincts() {
IQProperties properties = getProperties();
return properties.areDistinctAlreadyRemoved()
? this
: getRootNode().removeDistincts(getChildren(), properties);
}
@Override
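The resolution above keeps both branches' @Nullable cache fields and both lazily computed accessors. As a reference point, here is a minimal sketch of that compute-once idiom; the class and method names are hypothetical, not Ontop's, and it assumes the JSR-305 jar for javax.annotation.Nullable:

```java
import javax.annotation.Nullable;

// Compute-once caching with a @Nullable field, as in the merged
// getPossibleVariableDefinitions above: null means "not computed yet";
// the first call fills the cache. Single-threaded use only.
final class LazilyCachedTree {

    @Nullable
    private Boolean isDistinct;

    boolean isDistinct() {
        if (isDistinct == null) {
            isDistinct = computeIsDistinct(); // expensive traversal, done once
        }
        return isDistinct;
    }

    private boolean computeIsDistinct() {
        return true; // stand-in for the real recursive check over children
    }
}
```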
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.injection.QuestCoreConfiguration;
=======
import it.unibz.inf.ontop.owlrefplatform.core.optimization.IntermediateQueryOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.TopDownSubstitutionLiftOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.QueryUnfolder;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.impl.QueryUnfolderImpl;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.QueryUnfolder;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.impl.QueryUnfolderImpl;
import it.unibz.inf.ontop.owlrefplatform.injection.QuestCoreConfiguration;
<<<<<<<
private static final Injector INJECTOR = QuestCoreConfiguration.defaultBuilder().build().getInjector();
=======
>>>>>>>
private static final Injector INJECTOR = QuestCoreConfiguration.defaultBuilder().build().getInjector();
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ATOM.getVariables(),
=======
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
>>>>>>>
IntermediateQueryBuilder subQueryBuilder = new DefaultIntermediateQueryBuilder(METADATA, INJECTOR);
ConstructionNode subQueryRoot = new ConstructionNodeImpl(P1_ST_ATOM.getVariables(),
<<<<<<<
IQTree removeDistincts(ImmutableList<IQTree> children, IQProperties properties);
=======
NaryOperatorNode acceptNodeTransformer(HomogeneousQueryNodeTransformer transformer)
throws QueryNodeTransformationException;
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(ImmutableList<IQTree> children);
>>>>>>>
NaryOperatorNode acceptNodeTransformer(HomogeneousQueryNodeTransformer transformer)
throws QueryNodeTransformationException;
ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(ImmutableList<IQTree> children);
IQTree removeDistincts(ImmutableList<IQTree> children, IQProperties properties);
<<<<<<<
result = fac.getConstantLiteral(termRightName, COL_TYPE.INTEGER);
=======
result = factory.getConstantLiteral(termRightName, COL_TYPE.LONG);
>>>>>>>
result = fac.getConstantLiteral(termRightName, COL_TYPE.LONG);
<<<<<<<
result = fac.getConstantLiteral(termRightName, COL_TYPE.DATETIME);
=======
result = factory.getConstantLiteral(termRightName, COL_TYPE.DATE);
>>>>>>>
result = fac.getConstantLiteral(termRightName, COL_TYPE.DATE);
<<<<<<<
result = fac.getConstantLiteral(termRightName, COL_TYPE.DATETIME);
=======
result = factory.getConstantLiteral(termRightName, COL_TYPE.TIME);
>>>>>>>
result = fac.getConstantLiteral(termRightName, COL_TYPE.TIME);
<<<<<<<
private final OWLStatement st;
=======
private OWLConnection conn;
>>>>>>>
private final OWLConnection conn;
<<<<<<<
OWLConnection conn = reasoner.getConnection();
st = conn.createStatement();
=======
conn = reasoner.getConnection();
>>>>>>>
conn = reasoner.getConnection();
<<<<<<<
try (TupleOWLResultSet res = st.executeSelectQuery(query)) {
int count = 0;
while (res.hasNext()) {
OWLBindingSet bindingSet = res.next();
LOGGER.info(bindingSet.toString());
count += 1;
}
return count;
=======
try(OWLStatement st = conn.createStatement();
TupleOWLResultSet res = st.executeSelectQuery(query)) {
int count = 0;
while (res.hasNext()) {
final OWLBindingSet bindingSet = res.next();
LOGGER.info(bindingSet.toString());
count += 1;
}
res.close();
return count;
>>>>>>>
try(OWLStatement st = conn.createStatement();
TupleOWLResultSet res = st.executeSelectQuery(query)) {
int count = 0;
while (res.hasNext()) {
final OWLBindingSet bindingSet = res.next();
LOGGER.info(bindingSet.toString());
count += 1;
}
res.close();
return count;
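This resolution adopts the right-hand branch's try-with-resources form, so the OWLStatement and TupleOWLResultSet are closed automatically even when the query throws (the explicit res.close() it keeps is then a harmless no-op). As a self-contained sketch of the same pattern with plain JDBC, assuming only the H2 driver on the classpath:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class TryWithResourcesCount {

    // Both the Statement and the ResultSet are closed automatically when
    // the try block exits, normally or exceptionally.
    static int count(Connection conn, String query) throws SQLException {
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(query)) {
            int count = 0;
            while (rs.next()) {
                count += 1;
            }
            return count;
        }
    }

    public static void main(String[] args) throws SQLException {
        // In-memory H2 database, matching the JDBC URLs used by the tests in this dump.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "");
             Statement st = conn.createStatement()) {
            st.executeUpdate("CREATE TABLE t (x INT)");
            st.executeUpdate("INSERT INTO t VALUES (1), (2), (3)");
            System.out.println(count(conn, "SELECT x FROM t")); // prints 3
        }
    }
}
```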
<<<<<<<
=======
import com.fasterxml.jackson.databind.deser.ContextualDeserializer;
import com.fasterxml.jackson.databind.deser.NullValueProvider;
import com.fasterxml.jackson.databind.deser.impl.NullsConstantProvider;
>>>>>>>
import com.fasterxml.jackson.databind.deser.NullValueProvider;
import com.fasterxml.jackson.databind.deser.impl.NullsConstantProvider;
<<<<<<<
@SuppressWarnings("unchecked" )
=======
/**
* @since 2.7
* @deprecated Since 2.10.1
*/
@Deprecated
>>>>>>>
@SuppressWarnings("unchecked" )
<<<<<<<
return (EnumSet<?>) ctxt.handleUnexpectedToken(getValueType(ctxt), p);
=======
if (_skipNullValues) {
continue;
}
value = (Enum<?>) _nullProvider.getNullValue(ctxt);
} else {
value = _enumDeserializer.deserialize(p, ctxt);
>>>>>>>
if (_skipNullValues) {
continue;
}
value = (Enum<?>) _nullProvider.getNullValue(ctxt);
} else {
value = _enumDeserializer.deserialize(p, ctxt);
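The resolution keeps the right-hand branch's null handling: on a null token, the deserializer either skips the entry (_skipNullValues) or substitutes the configured null value (_nullProvider). Outside Jackson, the two policies look roughly like this; the enum and helper names are illustrative only:

```java
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Objects;

enum Color { RED, GREEN, BLUE }

final class NullPolicies {

    // Policy 1: drop null entries entirely (the _skipNullValues branch).
    static EnumSet<Color> skipNulls(List<Color> raw) {
        EnumSet<Color> out = EnumSet.noneOf(Color.class);
        raw.stream().filter(Objects::nonNull).forEach(out::add);
        return out;
    }

    // Policy 2: replace nulls with a constant default (the _nullProvider branch).
    static EnumSet<Color> nullsAsConstant(List<Color> raw, Color dflt) {
        EnumSet<Color> out = EnumSet.noneOf(Color.class);
        raw.forEach(c -> out.add(c == null ? dflt : c));
        return out;
    }

    public static void main(String[] args) {
        List<Color> raw = Arrays.asList(Color.RED, null, Color.BLUE);
        System.out.println(skipNulls(raw));                    // [RED, BLUE]
        System.out.println(nullsAsConstant(raw, Color.GREEN)); // [RED, GREEN, BLUE]
    }
}
```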
<<<<<<<
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBFunctionSymbolFactory;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBFunctionSymbolFactory;
import it.unibz.inf.ontop.model.type.TermType;
import it.unibz.inf.ontop.model.type.TermTypeInference;
>>>>>>>
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBFunctionSymbolFactory;
<<<<<<<
import it.unibz.inf.ontop.spec.mapping.MappingMetadata;
import it.unibz.inf.ontop.spec.mapping.OBDASQLQuery;
import it.unibz.inf.ontop.spec.mapping.SQLMappingFactory;
import it.unibz.inf.ontop.spec.mapping.bootstrap.DirectMappingBootstrapper.BootstrappingResults;
import it.unibz.inf.ontop.spec.mapping.impl.SQLMappingFactoryImpl;
=======
import it.unibz.inf.ontop.spec.mapping.*;
import it.unibz.inf.ontop.spec.mapping.PrefixManager;
>>>>>>>
import it.unibz.inf.ontop.spec.mapping.*;
import it.unibz.inf.ontop.spec.mapping.PrefixManager;
import it.unibz.inf.ontop.spec.mapping.MappingMetadata;
import it.unibz.inf.ontop.spec.mapping.OBDASQLQuery;
import it.unibz.inf.ontop.spec.mapping.SQLMappingFactory;
import it.unibz.inf.ontop.spec.mapping.bootstrap.DirectMappingBootstrapper.BootstrappingResults;
import it.unibz.inf.ontop.spec.mapping.impl.SQLMappingFactoryImpl;
<<<<<<<
it.unibz.inf.ontop.spec.mapping.PrefixManager prefixManager = specificationFactory.createPrefixManager(ImmutableMap.of());
MappingMetadata mappingMetadata = specificationFactory.createMetadata(prefixManager);
=======
PrefixManager prefixManager = specificationFactory.createPrefixManager(ImmutableMap.of());
MappingMetadata mappingMetadata = specificationFactory.createMetadata(prefixManager);
>>>>>>>
PrefixManager prefixManager = specificationFactory.createPrefixManager(ImmutableMap.of());
MappingMetadata mappingMetadata = specificationFactory.createMetadata(prefixManager);
<<<<<<<
import com.google.common.collect.Lists;
import it.unibz.krdb.config.tmappings.types.SimplePredicate;
=======
>>>>>>>
import com.google.common.collect.Lists;
import it.unibz.krdb.config.tmappings.types.SimplePredicate;
<<<<<<<
/** Davide> Exclude specific predicates from T-Mapping approach **/
private List<SimplePredicate> excludeFromTMappings = Lists.newArrayList();
/** Davide> Whether to exclude the user-supplied predicates from the
* TMapping procedure (that is, the mapping assertions for
* those predicates should not be extended according to the
* TBox hierarchies
*/
//private boolean applyExcludeFromTMappings;
=======
>>>>>>>
/** Davide> Exclude specific predicates from T-Mapping approach **/
private List<SimplePredicate> excludeFromTMappings = Lists.newArrayList();
/** Davide> Whether to exclude the user-supplied predicates from the
* TMapping procedure (that is, the mapping assertions for
* those predicates should not be extended according to the
* TBox hierarchies
*/
//private boolean applyExcludeFromTMappings;
<<<<<<<
/** Davide> Exclude specific predicates from T-Mapping approach **/
public void setExcludeFromTMappings(List<SimplePredicate> excludeFromTMappings){
assert(excludeFromTMappings != null);
this.excludeFromTMappings = excludeFromTMappings;
//this.applyExcludeFromTMappings = true;
}
=======
>>>>>>>
/** Davide> Exclude specific predicates from T-Mapping approach **/
public void setExcludeFromTMappings(List<SimplePredicate> excludeFromTMappings){
assert(excludeFromTMappings != null);
this.excludeFromTMappings = excludeFromTMappings;
//this.applyExcludeFromTMappings = true;
}
<<<<<<<
reformulationReasoner = new TBoxReasonerImpl(inputTBox);
Ontology reformulationOntology;
if (bOptimizeEquivalences) {
// Davide> TODO Remove from equivalenceMaps the predicates for which t-mappings are forbidden
// this is used to simplify the vocabulary of ABox assertions and mappings
equivalenceMaps = EquivalenceMap.getEquivalenceMap(reformulationReasoner);
=======
reformulationReasoner = new TBoxReasonerImpl(inputOntology);
if (bOptimizeEquivalences) {
>>>>>>>
reformulationReasoner = new TBoxReasonerImpl(inputOntology);
if (bOptimizeEquivalences) {
<<<<<<<
reformulationOntology = EquivalenceTBoxOptimizer.getOptimalTBox(reformulationReasoner,
equivalenceMaps, inputTBox.getVocabulary());
reformulationReasoner = new TBoxReasonerImpl(reformulationOntology);
} else {
equivalenceMaps = EquivalenceMap.getEmptyEquivalenceMap();
reformulationOntology = inputTBox;
}
// Set<Predicate> reformulationVocabulary = reformulationOntology.getVocabulary();
=======
reformulationReasoner = TBoxReasonerImpl.getEquivalenceSimplifiedReasoner(reformulationReasoner);
}
vocabularyValidator = new VocabularyValidator(reformulationReasoner);
>>>>>>>
reformulationReasoner = TBoxReasonerImpl.getEquivalenceSimplifiedReasoner(reformulationReasoner);
}
vocabularyValidator = new VocabularyValidator(reformulationReasoner);
<<<<<<<
// Davide> Option to disable T-Mappings (TODO: Test)
//if( tMappings ){
unfolder.applyTMappings(reformulationReasoner, true, excludeFromTMappings);
//}
// Adding ontology assertions (ABox) as rules (facts, head with no body).
unfolder.addABoxAssertionsAsFacts(inputTBox.getABox());
=======
// Apply TMappings
unfolder.applyTMappings(reformulationReasoner, true);
>>>>>>>
// Apply TMappings
//unfolder.applyTMappings(reformulationReasoner, true);
// Davide> Option to disable T-Mappings (TODO: Test)
//if( tMappings ){
unfolder.applyTMappings(reformulationReasoner, true, excludeFromTMappings);
//}
<<<<<<<
private void setupRewriter(TBoxReasoner reformulationR, Ontology sigma) {
if (reformulate == false) {
rewriter = new DummyReformulator();
} else if (QuestConstants.PERFECTREFORMULATION.equals(reformulationTechnique)) {
rewriter = new DLRPerfectReformulator();
} else if (QuestConstants.UCQBASED.equals(reformulationTechnique)) {
rewriter = new TreeRedReformulator();
} else if (QuestConstants.TW.equals(reformulationTechnique)) {
rewriter = new TreeWitnessRewriter();
} else {
throw new IllegalArgumentException("Invalid value for argument: " + QuestPreferences.REFORMULATION_TECHNIQUE);
}
rewriter.setTBox(reformulationR, sigma);
}
=======
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.iq.transform.IQTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor;
=======
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
>>>>>>>
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor;
<<<<<<<
import it.unibz.inf.ontop.model.atom.AtomFactory;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.*;
import it.unibz.inf.ontop.model.vocabulary.RDF;
import it.unibz.inf.ontop.model.vocabulary.RDFS;
=======
import it.unibz.inf.ontop.iq.node.ConstructionNode;
import it.unibz.inf.ontop.model.atom.AtomPredicate;
import it.unibz.inf.ontop.model.term.ImmutableFunctionalTerm;
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.Variable;
import it.unibz.inf.ontop.model.term.functionsymbol.BNodePredicate;
import it.unibz.inf.ontop.model.term.functionsymbol.DatatypePredicate;
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate;
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate.COL_TYPE;
import it.unibz.inf.ontop.model.term.functionsymbol.URITemplatePredicate;
import it.unibz.inf.ontop.model.term.impl.PredicateImpl;
import it.unibz.inf.ontop.model.type.TermType;
>>>>>>>
import it.unibz.inf.ontop.iq.node.ConstructionNode;
import it.unibz.inf.ontop.model.atom.AtomFactory;
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.model.type.*;
import it.unibz.inf.ontop.model.vocabulary.RDFS;
<<<<<<<
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.UriTemplateMatcher;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.VocabularyValidator;
=======
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.VocabularyValidator;
>>>>>>>
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.VocabularyValidator;
<<<<<<<
public Quest(Ontology tbox, OBDAModel mappings, DBMetadata metadata, Properties config) {
this(tbox, mappings, config);
this.metadata = metadata;
}
/** Davide> Exclude specific predicates from T-Mapping approach **/
public void setExcludeFromTMappings(TMappingExclusionConfig excludeFromTMappings){
assert(excludeFromTMappings != null);
this.excludeFromTMappings = excludeFromTMappings;
}
=======
>>>>>>>
/** Davide> Exclude specific predicates from T-Mapping approach **/
public void setExcludeFromTMappings(TMappingExclusionConfig excludeFromTMappings){
assert(excludeFromTMappings != null);
this.excludeFromTMappings = excludeFromTMappings;
}
<<<<<<<
//unfolder.applyTMappings(reformulationReasoner, true);
// Davide> Option to disable T-Mappings (TODO: Test)
//if( tMappings ){
unfolder.applyTMappings(reformulationReasoner, true, excludeFromTMappings);
//}
=======
unfolder.applyTMappings(reformulationReasoner, true, metadata);
>>>>>>>
//unfolder.applyTMappings(reformulationReasoner, true, metadata);
// Davide> Option to disable T-Mappings (TODO: Test)
//if( tMappings ){
unfolder.applyTMappings(reformulationReasoner, true, metadata, excludeFromTMappings);
//}
<<<<<<<
// TODO this code seems buggy, it will probably break easily (check the
// part with
// parenthesis in the beginning of the for loop.
Statement st = null;
try {
st = localConnection.createStatement();
for (OBDAMappingAxiom axiom : mappings) {
String sourceString = axiom.getSourceQuery().toString();
/*
* Check if the projection contains select all keyword, i.e.,
* 'SELECT * [...]'.
*/
if (containSelectAll(sourceString)) {
StringBuilder sb = new StringBuilder();
// If the SQL string has sub-queries in its statement
if (containChildParentSubQueries(sourceString)) {
int childquery1 = sourceString.indexOf("(");
int childquery2 = sourceString.indexOf(") AS child");
String childquery = sourceString.substring(childquery1 + 1, childquery2);
String copySourceQuery = createDummyQueryToFetchColumns(childquery, adapter);
if (st.execute(copySourceQuery)) {
ResultSetMetaData rsm = st.getResultSet().getMetaData();
boolean needComma = false;
for (int pos = 1; pos <= rsm.getColumnCount(); pos++) {
if (needComma) {
sb.append(", ");
}
String col = rsm.getColumnName(pos);
//sb.append("CHILD." + col );
sb.append("child.\"" + col + "\" AS \"child_" + (col)+"\"");
needComma = true;
}
}
sb.append(", ");
int parentquery1 = sourceString.indexOf(", (", childquery2);
int parentquery2 = sourceString.indexOf(") AS parent");
String parentquery = sourceString.substring(parentquery1 + 3, parentquery2);
copySourceQuery = createDummyQueryToFetchColumns(parentquery, adapter);
if (st.execute(copySourceQuery)) {
ResultSetMetaData rsm = st.getResultSet().getMetaData();
boolean needComma = false;
for (int pos = 1; pos <= rsm.getColumnCount(); pos++) {
if (needComma) {
sb.append(", ");
}
String col = rsm.getColumnName(pos);
//sb.append("PARENT." + col);
sb.append("parent.\"" + col + "\" AS \"parent_" + (col)+"\"");
needComma = true;
}
}
//If the SQL string doesn't have sub-queries
} else
{
String copySourceQuery = createDummyQueryToFetchColumns(sourceString, adapter);
if (st.execute(copySourceQuery)) {
ResultSetMetaData rsm = st.getResultSet().getMetaData();
boolean needComma = false;
for (int pos = 1; pos <= rsm.getColumnCount(); pos++) {
if (needComma) {
sb.append(", ");
}
sb.append("\"" + rsm.getColumnName(pos) + "\"");
needComma = true;
}
}
}
/*
* Replace the asterisk with the proper column names
*/
String columnProjection = sb.toString();
String tmp = axiom.getSourceQuery().toString();
int fromPosition = tmp.toLowerCase().indexOf("from");
int asteriskPosition = tmp.indexOf('*');
if (asteriskPosition != -1 && asteriskPosition < fromPosition) {
String str = sourceString.replaceFirst("\\*", columnProjection);
axiom.setSourceQuery(factory.getSQLQuery(str));
}
}
}
} finally {
if (st != null) {
st.close();
}
}
}
// Davide> TODO: Test
public void setQueryTimeout(Statement st) throws SQLException {
int timeout = 1200;
//int timeout = 30;
ConnClasses connClass = ConnClasses.fromString(localConnection.getClass().getName());
if(connClass == null){
st.setQueryTimeout(timeout);
return;
}
switch(connClass){
case DB2:
case MYSQL:
st.setQueryTimeout(timeout);
break;
case POSTGRES:
{
if( !timeoutSet ){
String query = String.format("SET statement_timeout TO %d", timeout*1000); // 1000ms = one second
st.execute(query);
timeoutSet = true;
}
break;
}
default:
st.setQueryTimeout(timeout);
break;
}
}
public void resetTimeouts(Statement st) throws SQLException {
ConnClasses connClass = ConnClasses.fromString(localConnection.getClass().toString());
if(connClass == null){
// TODO: check
return;
}
switch(connClass){
case MYSQL:
case DB2:
// Do nothing
break;
case POSTGRES:
{
String query = "RESET statement_timeout;";
st.execute(query);
break;
}
}
}
private static final String selectAllPattern = "(S|s)(E|e)(L|l)(E|e)(C|c)(T|t)\\s+\\*";
private static final String subQueriesPattern = "\\(.*\\)\\s+(A|a)(S|s)\\s+(C|c)(H|h)(I|i)(L|l)(D|d),\\s+\\(.*\\)\\s+(A|a)(S|s)\\s+(P|p)(A|a)(R|r)(E|e)(N|n)(T|t)";
private static boolean containSelectAll(String sql) {
final Pattern pattern = Pattern.compile(selectAllPattern);
return pattern.matcher(sql).find();
}
private static boolean containChildParentSubQueries(String sql) {
final Pattern pattern = Pattern.compile(subQueriesPattern);
return pattern.matcher(sql).find();
}
private static String createDummyQueryToFetchColumns(String originalQuery, SQLDialectAdapter adapter) {
String toReturn = String.format("select * from (%s) view20130219 ", originalQuery);
if (adapter instanceof SQLServerSQLDialectAdapter) {
SQLServerSQLDialectAdapter sqlServerAdapter = (SQLServerSQLDialectAdapter) adapter;
toReturn = sqlServerAdapter.sqlLimit(toReturn, 1);
} else {
toReturn += adapter.sqlSlice(0, Long.MIN_VALUE);
}
return toReturn;
}
=======
>>>>>>>
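In the block removed above, case-insensitive SQL matching is spelled out letter by letter ((S|s)(E|e)(L|l)...). For reference, the same two checks written with Pattern.CASE_INSENSITIVE, which is the idiomatic equivalent of that per-letter alternation:

```java
import java.util.regex.Pattern;

// Equivalent, more readable forms of selectAllPattern and subQueriesPattern
// from the removed block, using CASE_INSENSITIVE instead of (S|s)(E|e)...
final class SqlPatterns {

    private static final Pattern SELECT_ALL =
            Pattern.compile("select\\s+\\*", Pattern.CASE_INSENSITIVE);

    private static final Pattern CHILD_PARENT_SUBQUERIES =
            Pattern.compile("\\(.*\\)\\s+as\\s+child,\\s+\\(.*\\)\\s+as\\s+parent",
                    Pattern.CASE_INSENSITIVE);

    static boolean containsSelectAll(String sql) {
        return SELECT_ALL.matcher(sql).find();
    }

    static boolean containsChildParentSubQueries(String sql) {
        return CHILD_PARENT_SUBQUERIES.matcher(sql).find();
    }

    public static void main(String[] args) {
        System.out.println(containsSelectAll("SELECT * FROM t")); // true
    }
}
```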
<<<<<<<
private void runTests(String obdaFileName) throws Exception {
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
private void runTests() throws Exception {
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
private void runTests(String obdaFileName) throws Exception {
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
QuestOWLFactory factory = new QuestOWLFactory(new File(obdaFileName), new QuestPreferences(p));
QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
=======
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
>>>>>>>
QuestOWLFactory factory = new QuestOWLFactory(new File(obdaFileName), new QuestPreferences(p));
QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
<<<<<<<
private final AtomFactory atomFactory;
private final TermFactory termFactory;
private final DatalogFactory datalogFactory;
@Inject
private LinearInclusionDependencyTools(AtomFactory atomFactory, TermFactory termFactory,
DatalogFactory datalogFactory) {
this.atomFactory = atomFactory;
this.termFactory = termFactory;
this.datalogFactory = datalogFactory;
}
public LinearInclusionDependencies getABoxDependencies(TBoxReasoner reasoner, boolean full) {
LinearInclusionDependencies dependencies = new LinearInclusionDependencies(datalogFactory);
=======
public static LinearInclusionDependencies getABoxDependencies(ClassifiedTBox reasoner, boolean full) {
LinearInclusionDependencies dependencies = new LinearInclusionDependencies();
>>>>>>>
private final AtomFactory atomFactory;
private final TermFactory termFactory;
private final DatalogFactory datalogFactory;
@Inject
private LinearInclusionDependencyTools(AtomFactory atomFactory, TermFactory termFactory,
DatalogFactory datalogFactory) {
this.atomFactory = atomFactory;
this.termFactory = termFactory;
this.datalogFactory = datalogFactory;
}
public LinearInclusionDependencies getABoxDependencies(ClassifiedTBox reasoner, boolean full) {
LinearInclusionDependencies dependencies = new LinearInclusionDependencies(datalogFactory);
<<<<<<<
private final DatalogRule2QueryConverter datalogRule2QueryConverter;
=======
private final NoNullValueEnforcer noNullValueEnforcer;
>>>>>>>
private final NoNullValueEnforcer noNullValueEnforcer;
private final DatalogRule2QueryConverter datalogRule2QueryConverter;
<<<<<<<
ProvenanceMappingFactory provMappingFactory,
DatalogRule2QueryConverter datalogRule2QueryConverter){
=======
ProvenanceMappingFactory provMappingFactory,
NoNullValueEnforcer noNullValueEnforcer){
>>>>>>>
ProvenanceMappingFactory provMappingFactory,
NoNullValueEnforcer noNullValueEnforcer,
DatalogRule2QueryConverter datalogRule2QueryConverter){
<<<<<<<
this.datalogRule2QueryConverter = datalogRule2QueryConverter;
=======
this.noNullValueEnforcer = noNullValueEnforcer;
>>>>>>>
this.noNullValueEnforcer = noNullValueEnforcer;
this.datalogRule2QueryConverter = datalogRule2QueryConverter;
<<<<<<<
e -> datalogRule2QueryConverter.convertDatalogRule(dbMetadata, e.getKey(), extensionalPredicates, Optional.empty(),
iqFactory, executorRegistry),
Map.Entry::getValue));
=======
e -> noNullValueEnforcer.transform(
convertDatalogRule(
dbMetadata,
e.getKey(),
extensionalPredicates,
Optional.empty(),
iqFactory,
executorRegistry
)),
Map.Entry::getValue
));
>>>>>>>
e -> noNullValueEnforcer.transform(
datalogRule2QueryConverter.convertDatalogRule(
dbMetadata,
e.getKey(),
extensionalPredicates,
Optional.empty(),
iqFactory,
executorRegistry
)),
Map.Entry::getValue
));
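This resolution composes the two branches instead of picking one: the left branch's instance-based converter call is threaded through the right branch's new noNullValueEnforcer.transform(...) wrapper. A minimal sketch of that "apply both sides" shape, with plain functions standing in for the injected collaborators (all names here are hypothetical):

```java
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

// The merged lambda has the shape enforce(convert(rule)): one branch
// refactored the conversion call, the other wrapped it in an enforcer.
final class ComposedPipeline {

    static <R, Q> List<Q> convertAll(List<R> rules,
                                     Function<R, Q> convert,   // converter from one branch
                                     Function<Q, Q> enforce) { // enforcer from the other
        return rules.stream()
                .map(convert.andThen(enforce)) // converter output feeds the enforcer
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> rules = List.of("a", "b");
        System.out.println(convertAll(rules, String::toUpperCase, s -> s + "!")); // [A!, B!]
    }
}
```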
<<<<<<<
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
<<<<<<< HEAD:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/gui/action/R2RMLImportAction.java
import com.google.inject.Guice;
import com.google.inject.Injector;
import it.unibz.inf.ontop.protege.core.OBDAModelWrapper;
import org.protege.editor.core.Disposable;
import org.protege.editor.core.ui.action.ProtegeAction;
import org.protege.editor.owl.OWLEditorKit;
import org.protege.editor.owl.model.OWLWorkspace;
=======
>>>>>>>
import com.google.inject.Guice;
import com.google.inject.Injector;
<<<<<<<
obdaModelController.addMapping(sourceID, mapping);
=======
obdaModel.addMapping(sourceID, mapping, false);
>>>>>>>
obdaModelController.addMapping(sourceID, mapping, false);
<<<<<<<
=======
} catch (Exception e) {
JOptionPane.showMessageDialog(null, "An error occurred. For more info, see the logs.");
log.error("Error during r2rml import. \n");
e.printStackTrace();
}
>>>>>>>
<<<<<<<
@Override
public void replaceNodeByChild(QueryNode parentNode, Optional<ArgumentPosition> optionalReplacingChildPosition) {
tree.replaceNodeByChild(parentNode, optionalReplacingChildPosition);
}
=======
@Override
public QueryTreeComponent createSnapshot() {
return new DefaultQueryTreeComponent(tree.createSnapshot(), new VariableGenerator(
variableGenerator.getKnownVariables()));
}
>>>>>>>
@Override
public void replaceNodeByChild(QueryNode parentNode, Optional<ArgumentPosition> optionalReplacingChildPosition) {
tree.replaceNodeByChild(parentNode, optionalReplacingChildPosition);
}
@Override
public QueryTreeComponent createSnapshot() {
return new DefaultQueryTreeComponent(tree.createSnapshot(), new VariableGenerator(
variableGenerator.getKnownVariables()));
}
<<<<<<<
=======
import org.semanticweb.ontop.model.Function;
import org.semanticweb.ontop.model.impl.OBDAVocabulary;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.TypePropagatingSubstitution;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.Substitution;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.SubstitutionUtilities;
>>>>>>>
<<<<<<<
import static org.semanticweb.ontop.owlrefplatform.core.unfolding.TypeLiftTools.removeHeadTypes;
import static org.semanticweb.ontop.owlrefplatform.core.unfolding.TypeLiftTools.updateMultiTypedFunctionSymbolIndex;
=======
import static org.semanticweb.ontop.owlrefplatform.core.basicoperations.TypePropagatingSubstitution.forceVariableReuse;
>>>>>>>
import static org.semanticweb.ontop.owlrefplatform.core.unfolding.TypeLiftTools.updateMultiTypedFunctionSymbolIndex;
<<<<<<<
=======
/**
* If there is no child proposal, no need to aggregate.
* Builds and returns a proposal just by looking at the rules defining the parent predicate.
*/
if (childProposalIndex.isEmpty()) {
return proposeTypeFromLocalRules(parentZipper);
}
/**
* Aggregates all these proposals according to the rules defining the parent predicate.
*
* If such aggregation is not possible, a MultiTypeException will be thrown.
*
* Returns the resulting proposal.
*
*/
final List<CQIE> parentRules = parentZipper.getLabel()._2();
final Substitution proposedSubstitutionFct = aggregateChildrenProposalsAndRules(
Option.<Substitution>none(), parentRules, childProposalIndex);
Function newProposal = (Function) parentRules.head().getHead().clone();
// Side-effect!
SubstitutionUtilities.applySubstitution(newProposal, proposedSubstitutionFct);
>>>>>>>
<<<<<<<
=======
/**
* Tail-recursive method "iterating" over the rules defining the parent predicate.
* In most case, there is just one of these rules.
*
* It brings a substitution function that is "updated" (new object)
* for each rule.
*
* Returns the final substitution function.
* May raises a MultiTypedException.
*
*/
private static Substitution aggregateChildrenProposalsAndRules(Option<Substitution> optionalSubstitutionFct,
List<CQIE> remainingRules, HashMap<Predicate, Function> childProposalIndex)
throws MultiTypeException {
/**
* Stop condition (no more rule to consider).
*/
if (remainingRules.isEmpty()) {
if (optionalSubstitutionFct.isNone()) {
throw new IllegalArgumentException("Do not give a None head with an empty list of rules");
}
/**
* Returns the proposed substitutions obtained from the previous rules.
*/
return optionalSubstitutionFct.some();
}
/**
* Main operation: updates the substitution function according to the current rule and the children proposals.
* May throw a MultipleTypeException.
*/
CQIE rule = remainingRules.head();
Substitution proposedSubstitutionFct = aggregateRuleAndProposals(optionalSubstitutionFct, extractBodyAtoms(rule),
childProposalIndex);
/**
* Tail recursion.
*/
return aggregateChildrenProposalsAndRules(Option.some(proposedSubstitutionFct), remainingRules.tail(), childProposalIndex);
}
/**
* Tail-recursive method that "iterates" over the body atoms of a given rule defining the parent predicate.
*
* For a given body atom, tries to make the *union* (NOT composition) of the current substitution function with
* the one deduced from the child proposal corresponding to the current atom.
*
* If some problems with a substitution function occur, throws a MultiTypeException.
*
*/
private static Substitution aggregateRuleAndProposals(final Option<Substitution> optionalSubstitutionFunction,
final List<Function> remainingBodyAtoms,
final HashMap<Predicate, Function> childProposalIndex) throws MultiTypeException {
>>>>>>>
<<<<<<<
=======
Function bodyAtom = remainingBodyAtoms.head();
Option<Function> optionalChildProposal = childProposalIndex.get(bodyAtom.getFunctionSymbol());
Option<Substitution> newOptionalSubstitutionFct;
>>>>>>>
<<<<<<<
final List<CQIE> updatedParentRules = proposal.getTypedRules();
=======
if (typePropagatingSubstitutionFunction == null) {
throw new SubstitutionUtilities.SubstitutionException();
}
/**
* The current substitution function may change variable names because they were not the same in the two atoms.
*
* Here, we are just interested in the types but we do not want to change the variable names.
* Thus, we force variable reuse.
*/
Substitution renamedSubstitutions = forceVariableReuse(typePropagatingSubstitutionFunction);
return renamedSubstitutions;
}
/**
* Applies the type proposal to the rule heads.
*
* Returns updated rules.
*/
private static List<CQIE> applyTypeToRules(List<CQIE> initialRules, final Function typeProposal)
throws TypeApplicationError{
return initialRules.map(new F<CQIE, CQIE>() {
@Override
public CQIE f(CQIE initialRule) {
Function currentHead = initialRule.getHead();
try {
Function newHead = applyTypeProposal(currentHead, typeProposal);
// Mutable object
CQIE newRule = initialRule.clone();
newRule.updateHead(newHead);
return newRule;
/**
* A SubstitutionException exception should not appear at this level.
* There is an inconsistency somewhere.
*
* Throws a runtime exception (TypeApplicationError)
* that should not be expected.
*/
} catch(SubstitutionUtilities.SubstitutionException e) {
throw new TypeApplicationError();
}
}
});
}
/**
* Propagates type from a typeProposal to one head atom.
*/
private static Function applyTypeProposal(Function headAtom, Function typeProposal) throws SubstitutionUtilities.SubstitutionException {
Substitution substitutionFunction = computeTypePropagatingSubstitution(headAtom, typeProposal);
// Mutable object
Function newHead = (Function) headAtom.clone();
// Limited side-effect
SubstitutionUtilities.applySubstitution(newHead, substitutionFunction);
return newHead;
}
/**
* Removes types from rules.
*
* Reuses the DatalogUnfolder.untypeTerm() static method.
*
* Returns updated rules.
*/
private static List<CQIE> removeTypesFromRules(List<CQIE> initialRules) {
return initialRules.map(new F<CQIE, CQIE>() {
@Override
public CQIE f(CQIE initialRule) {
Function initialHead = initialRule.getHead();
List<Term> initialHeadTerms = List.iterableList(initialHead.getTerms());
/**
* Computes untyped arguments for the head predicate.
*/
List<Term> newHeadTerms = initialHeadTerms.map(new F<Term, Term>() {
@Override
public Term f(Term term) {
return untypeTerm(term);
}
});
/**
* Builds a new rule.
* TODO: modernize the CQIE API (make it immutable).
*/
CQIE newRule = initialRule.clone();
Function newHead = (Function)initialHead.clone();
newHead.updateTerms(new ArrayList<>(newHeadTerms.toCollection()));
newRule.updateHead(newHead);
return newRule;
}
});
}
/**
* Makes a type proposal by looking at the rules defining the current predicate.
*
* Its current implementation is very basic and could be improved.
* It returns the head of the first rule.
*
* TODO: Several improvements could be done:
* 1. Unifying all the rule heads (case where is there is multiple rules).
* 2. Detecting if no type is present in the proposal and returning a None in
* this case.
*/
private static Option<Function> proposeTypeFromLocalRules(TreeZipper<P3<Predicate, List<CQIE>, Option<Function>>> currentZipper) {
List<CQIE> currentRules = currentZipper.getLabel()._2();
if (currentRules.isNotEmpty()) {
// Head of the first rule (cloned because mutable).
Function typeProposal = (Function) currentRules.head().getHead().clone();
>>>>>>>
final List<CQIE> updatedParentRules = proposal.getTypedRules();
<<<<<<<
=======
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import it.unibz.inf.ontop.utils.UriTemplateMatcher;
>>>>>>>
import it.unibz.inf.ontop.utils.ImmutableCollectors;
<<<<<<<
ImmutableList<CQIE> rules = convertAssertions(
ontology.getClassAssertions(),
ontology.getObjectPropertyAssertions(),
ontology.getDataPropertyAssertions(),
annotationAssertions);
=======
ImmutableList.Builder<Function> heads = ImmutableList.builder();
for (ClassAssertion ca : ontology.getClassAssertions()) {
heads.add(convertClassAssertion(
((IRIConstant) ca.getIndividual()).getIRI(),
ca.getConcept().getIRI(), uriTemplateMatcher));
}
LOGGER.debug("Appended {} class assertions from ontology as fact rules", ontology.getClassAssertions().size());
for (ObjectPropertyAssertion pa : ontology.getObjectPropertyAssertions()) {
heads.add(convertObjectPropertyAssertion(
((IRIConstant) pa.getSubject()).getIRI(),
pa.getProperty().getIRI(),
((IRIConstant) pa.getObject()).getIRI(), uriTemplateMatcher));
}
LOGGER.debug("Appended {} object property assertions as fact rules", ontology.getObjectPropertyAssertions().size());
for (DataPropertyAssertion da : ontology.getDataPropertyAssertions()) {
heads.add(convertDataPropertyAssertion(
((IRIConstant) da.getSubject()).getIRI(),
da.getProperty().getIRI(),
da.getValue()));
}
LOGGER.debug("Appended {} data property assertions as fact rules", ontology.getDataPropertyAssertions().size());
if (isOntologyAnnotationQueryingEnabled) {
for (AnnotationAssertion aa : ontology.getAnnotationAssertions()) {
heads.add(convertAnnotationAssertion(
((IRIConstant) aa.getSubject()).getIRI(),
aa.getProperty().getIRI(),
aa.getValue()));
}
LOGGER.debug("Appended {} annotation assertions as fact rules", ontology.getAnnotationAssertions().size());
}
ImmutableList<CQIE> rules = heads.build().stream()
.map(h -> datalogFactory.getCQIE(h, Collections.emptyList()))
.collect(ImmutableCollectors.toList());
>>>>>>>
ImmutableList.Builder<Function> heads = ImmutableList.builder();
for (ClassAssertion ca : ontology.getClassAssertions()) {
heads.add(atomFactory.getMutableTripleHeadAtom(
ca.getIndividual(), ca.getConcept().getIRI()));
}
LOGGER.debug("Appended {} class assertions from ontology as fact rules", ontology.getClassAssertions().size());
for (ObjectPropertyAssertion pa : ontology.getObjectPropertyAssertions()) {
IRIConstant s = (IRIConstant) pa.getSubject();
IRIConstant o = (IRIConstant) pa.getObject();
IRI propertyIRI = pa.getProperty().getIRI();
heads.add(atomFactory.getMutableTripleHeadAtom(
s,
propertyIRI,
o));
}
LOGGER.debug("Appended {} object property assertions as fact rules", ontology.getObjectPropertyAssertions().size());
for (DataPropertyAssertion da : ontology.getDataPropertyAssertions()) {
// no blank nodes are supported here
IRIConstant s = (IRIConstant) da.getSubject();
RDFLiteralConstant o = da.getValue();
IRI propertyIRI = da.getProperty().getIRI();
heads.add(atomFactory.getMutableTripleHeadAtom(s, propertyIRI, o));
}
LOGGER.debug("Appended {} data property assertions as fact rules", ontology.getDataPropertyAssertions().size());
if (isOntologyAnnotationQueryingEnabled) {
for (AnnotationAssertion aa : ontology.getAnnotationAssertions()) {
IRIConstant s = (IRIConstant) aa.getSubject();
Constant v = aa.getValue();
IRI propertyIRI = aa.getProperty().getIRI();
Function head = (v instanceof RDFLiteralConstant)
? atomFactory.getMutableTripleHeadAtom(s, propertyIRI, (RDFLiteralConstant) v)
: atomFactory.getMutableTripleHeadAtom(s, propertyIRI, (IRIConstant) v);
heads.add(head);
}
LOGGER.debug("Appended {} annotation assertions as fact rules", ontology.getAnnotationAssertions().size());
}
ImmutableList<CQIE> rules = heads.build().stream()
.map(h -> datalogFactory.getCQIE(h, Collections.emptyList()))
.collect(ImmutableCollectors.toList());
<<<<<<<
import static it.unibz.inf.ontop.iq.node.BinaryOrderedOperatorNode.ArgumentPosition.*;
import static it.unibz.inf.ontop.model.term.functionsymbol.BooleanExpressionOperation.EQ;
=======
import static it.unibz.inf.ontop.OptimizationTestingTools.*;
import static it.unibz.inf.ontop.iq.node.BinaryOrderedOperatorNode.ArgumentPosition.LEFT;
import static it.unibz.inf.ontop.iq.node.BinaryOrderedOperatorNode.ArgumentPosition.RIGHT;
import static it.unibz.inf.ontop.model.term.functionsymbol.ExpressionOperation.EQ;
>>>>>>>
import static it.unibz.inf.ontop.OptimizationTestingTools.*;
import static it.unibz.inf.ontop.iq.node.BinaryOrderedOperatorNode.ArgumentPosition.LEFT;
import static it.unibz.inf.ontop.iq.node.BinaryOrderedOperatorNode.ArgumentPosition.RIGHT;
import static it.unibz.inf.ontop.model.term.functionsymbol.BooleanExpressionOperation.EQ;
<<<<<<<
import java.util.Properties;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.*;
import org.junit.*;
import org.junit.Before;
import org.junit.Test;
=======
>>>>>>>
import java.util.Properties;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.*;
import org.junit.*;
import org.junit.Before;
import org.junit.Test;
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import it.unibz.inf.ontop.owlapi3.OntopOWLException;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
=======
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import it.unibz.inf.ontop.owlapi3.OntopOWLException;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
<<<<<<<
* Refer in particular to the class SparqlAlgebraToDatalogTranslator
=======
* Refer in particular to the class {@link it.unibz.inf.ontop.owlrefplatform.core.translator.SparqlAlgebraToDatalogTranslator}
>>>>>>>
* Refer in particular to the class {@link it.unibz.inf.ontop.owlrefplatform.core.translator.SparqlAlgebraToDatalogTranslator}
<<<<<<<
// String driver = "org.h2.Driver";
String url = "jdbc:h2:mem:questjunitdb";
String username = "sa";
String password = "";
conn = DriverManager.getConnection(url, username, password);
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-create-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
conn.commit();
// Loading the OWL file
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile)));
}
@After
public void tearDown() throws Exception {
dropTables();
conn.close();
}
private void dropTables() throws SQLException, IOException {
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-drop-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
st.close();
conn.commit();
}
private OWLObject runTests(Properties p, String query) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(p));
QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
// Now we are ready for querying
QuestOWLConnection conn = reasoner.getConnection();
QuestOWLStatement st = conn.createStatement();
try {
QuestOWLResultSet rs = st.executeTuple(query);
=======
// String driver = "org.h2.Driver";
String url = "jdbc:h2:mem:questjunitdb";
String username = "sa";
String password = "";
fac = OBDADataFactoryImpl.getInstance();
conn = DriverManager.getConnection(url, username, password);
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-create-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
conn.commit();
// Loading the OWL file
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile)));
// Loading the OBDA data
obdaModel = fac.getOBDAModel();
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load(obdafile);
}
@After
public void tearDown() throws Exception {
dropTables();
conn.close();
}
private void dropTables() throws SQLException, IOException {
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-drop-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
st.close();
conn.commit();
}
private OWLObject runTests(QuestPreferences p, String query) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
// Now we are ready for querying
QuestOWLConnection conn = reasoner.getConnection();
QuestOWLStatement st = conn.createStatement();
try {
QuestOWLResultSet rs = st.executeTuple(query);
>>>>>>>
// String driver = "org.h2.Driver";
String url = "jdbc:h2:mem:questjunitdb";
String username = "sa";
String password = "";
conn = DriverManager.getConnection(url, username, password);
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-create-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
conn.commit();
// Loading the OWL file
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile)));
}
@After
public void tearDown() throws Exception {
dropTables();
conn.close();
}
private void dropTables() throws SQLException, IOException {
Statement st = conn.createStatement();
FileReader reader = new FileReader("src/test/resources/test/bind/sparqlBind-drop-h2.sql");
BufferedReader in = new BufferedReader(reader);
StringBuilder bf = new StringBuilder();
String line = in.readLine();
while (line != null) {
bf.append(line);
line = in.readLine();
}
in.close();
st.executeUpdate(bf.toString());
st.close();
conn.commit();
}
private OWLObject runTests(QuestPreferences p, String query) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(p));
QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
// Now we are ready for querying
QuestOWLConnection conn = reasoner.getConnection();
QuestOWLStatement st = conn.createStatement();
try {
QuestOWLResultSet rs = st.executeTuple(query);
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
<<<<<<<
private void checkReturnedValues(Properties p, String query, List<String> expectedValues) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(p));
=======
private void checkReturnedValues(QuestPreferences p, String query, List<String> expectedValues) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
>>>>>>>
private void checkReturnedValues(Properties p, String query, List<String> expectedValues) throws Exception {
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(p)); |
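The run of identical conflicts above is the same API migration repeated once per test method: the incoming branch configures the reasoner through QuestPreferences.setCurrentValueOf, while the resolution settles on a plain java.util.Properties object that is later wrapped in a QuestPreferences. A minimal sketch of the two styles side by side, assuming the Quest classes imported elsewhere in this document (the superseded setter will not compile against the post-migration API, so the two methods illustrate different points in time):
import java.util.Properties;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
public class PreferenceStyleSketch {
    // Resolved style: collect settings in plain Properties, wrap on use.
    static QuestPreferences resolvedStyle() {
        Properties p = new Properties();
        p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
        p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
        return new QuestPreferences(p); // constructor used by the resolutions above
    }
    // Superseded style (incoming branch): mutate QuestPreferences directly.
    static QuestPreferences incomingStyle() {
        QuestPreferences p = new QuestPreferences();
        p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
        p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
        return p;
    }
}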
<<<<<<<
import java.util.Optional;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import it.unibz.inf.ontop.exception.DuplicateMappingException;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.injection.OBDAFactoryWithException;
import it.unibz.inf.ontop.injection.OBDAProperties;
=======
>>>>>>>
import java.util.Optional;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.assistedinject.Assisted;
import it.unibz.inf.ontop.exception.DuplicateMappingException;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.injection.OBDAFactoryWithException;
import it.unibz.inf.ontop.injection.OBDAProperties;
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.core.srcquerygeneration.NativeQueryGenerator;
import it.unibz.inf.ontop.owlrefplatform.core.translator.MappingVocabularyFixer;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.model.impl.RDBMSourceParameterConstants;
import it.unibz.inf.ontop.ontology.Ontology;
import it.unibz.inf.ontop.owlrefplatform.core.basicoperations.VocabularyValidator;
import it.unibz.inf.ontop.owlrefplatform.core.mappingprocessing.TMappingExclusionConfig;
import it.unibz.inf.ontop.owlrefplatform.injection.QuestComponentFactory;
import it.unibz.inf.ontop.owlrefplatform.injection.QuestCorePreferences;
import it.unibz.inf.ontop.sql.ImplicitDBConstraintsReader;
import it.unibz.inf.ontop.utils.IMapping2DatalogConverter;
=======
import it.unibz.inf.ontop.owlrefplatform.core.srcquerygeneration.SQLGenerator;
import it.unibz.inf.ontop.owlrefplatform.core.srcquerygeneration.SQLQueryGenerator;
import it.unibz.inf.ontop.owlrefplatform.core.translator.MappingVocabularyRepair;
import it.unibz.inf.ontop.sql.*;
import it.unibz.inf.ontop.utils.MappingParser;
import net.sf.jsqlparser.JSQLParserException;
import org.apache.tomcat.jdbc.pool.DataSource;
import org.apache.tomcat.jdbc.pool.PoolProperties;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.srcquerygeneration.NativeQueryGenerator;
import it.unibz.inf.ontop.owlrefplatform.core.translator.MappingVocabularyFixer;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.model.impl.RDBMSourceParameterConstants;
import it.unibz.inf.ontop.ontology.Ontology;
import it.unibz.inf.ontop.owlrefplatform.core.basicoperations.VocabularyValidator;
import it.unibz.inf.ontop.owlrefplatform.core.mappingprocessing.TMappingExclusionConfig;
import it.unibz.inf.ontop.owlrefplatform.injection.QuestComponentFactory;
import it.unibz.inf.ontop.owlrefplatform.injection.QuestCorePreferences;
import it.unibz.inf.ontop.sql.ImplicitDBConstraintsReader;
import it.unibz.inf.ontop.utils.IMapping2DatalogConverter;
<<<<<<<
loadOBDAModel(obdaModel.orElse(null), inputOntology.getVocabulary());
// TODO: use the Optional instead
this.metadata = inputMetadata.orElse(null);
=======
loadOBDAModel(obdaModel);
}
/** Davide> Exclude specific predicates from T-Mapping approach **/
public void setExcludeFromTMappings(TMappingExclusionConfig excludeFromTMappings){
assert(excludeFromTMappings != null);
this.excludeFromTMappings = excludeFromTMappings;
}
/**
* Supplies user constraints, i.e., primary and foreign keys that are not declared in the database.
* Can be useful for eliminating self-joins.
*
* @param userConstraints User-supplied primary and foreign keys (only useful if these are not in the metadata);
*                        may be used by ontop to eliminate self-joins
*/
public void setImplicitDBConstraints(ImplicitDBConstraintsReader userConstraints) {
assert(userConstraints != null);
this.userConstraints = userConstraints;
this.applyUserConstraints = true;
}
/**
* Enables/disables querying annotation properties defined in the ontology.
* Overrides the value defined in QuestPreferences.
*
* @param queryingAnnotationsInOntology
*/
public void setQueryingAnnotationsInOntology(boolean queryingAnnotationsInOntology) {
this.queryingAnnotationsInOntology = queryingAnnotationsInOntology;
}
/**
* Enables/disables querying sameAs properties defined in the mapping.
* Overrides the value defined in QuestPreferences.
*
* @param sameAsInMapping
*/
public void setSameAsInMapping(boolean sameAsInMapping) {
this.sameAsInMapping = sameAsInMapping;
>>>>>>>
loadOBDAModel(obdaModel.orElse(null), inputOntology.getVocabulary());
// TODO: use the Optional instead
this.metadata = inputMetadata.orElse(null);
<<<<<<<
// we work in memory (with H2), the database is clean and
=======
log.warn("Semantic index mode initializing: \nString operation over URI are not supported in this mode ");
// we work in memory (with H2), the database is clean and
>>>>>>>
log.warn("Semantic index mode initializing: \nString operation over URI are not supported in this mode ");
// we work in memory (with H2), the database is clean and
<<<<<<<
unfolder.setupInSemanticIndexMode(mappings, dbConnector, reformulationReasoner, metadata);
if (dataRepository != null)
dataRepository.addRepositoryChangedListener(new RepositoryChangedListener() {
@Override
public void repositoryChanged() {
engine.clearNativeQueryCache();
try {
//
unfolder.setupInSemanticIndexMode(dataRepository.getMappings(), dbConnector, reformulationReasoner,
metadata);
log.debug("Mappings and unfolder have been updated after inserts to the semantic index DB");
}
catch (Exception e) {
log.error("Error updating Semantic Index mappings", e);
}
}
});
=======
unfolder.setupInSemanticIndexMode(mappings, reformulationReasoner);
>>>>>>>
unfolder.setupInSemanticIndexMode(mappings, dbConnector, reformulationReasoner, metadata);
<<<<<<<
engine = new QuestQueryProcessor(rewriter, sigma, unfolder, vocabularyValidator, getUriMap(),
dataSourceQueryGenerator, queryCache, distinctResultSet, injector);
=======
engine = new QuestQueryProcessor(rewriter, sigma, unfolder, vocabularyValidator, getUriMap(), datasourceQueryGenerator);
if (dataRepository != null)
dataRepository.addRepositoryChangedListener(new RepositoryChangedListener() {
@Override
public void repositoryChanged() {
engine.clearSQLCache();
try {
//
engine = engine.changeMappings(dataRepository.getMappings(), reformulationReasoner);
log.debug("Mappings and unfolder have been updated after inserts to the semantic index DB");
}
catch (Exception e) {
log.error("Error updating Semantic Index mappings", e);
}
}
});
>>>>>>>
engine = new QuestQueryProcessor(rewriter, sigma, unfolder, vocabularyValidator, getUriMap(),
dataSourceQueryGenerator, queryCache, distinctResultSet, injector);
if (dataRepository != null)
dataRepository.addRepositoryChangedListener(new RepositoryChangedListener() {
@Override
public void repositoryChanged() {
engine.clearNativeQueryCache();
try {
//
engine = engine.changeMappings(dataRepository.getMappings(), reformulationReasoner);
log.debug("Mappings and unfolder have been updated after inserts to the semantic index DB");
}
catch (Exception e) {
log.error("Error updating Semantic Index mappings", e);
}
}
}); |
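The resolution above combines both branches: it keeps the local branch's richer QuestQueryProcessor constructor and cache-clearing call, but takes the incoming branch's listener body, which replaces the engine via changeMappings instead of mutating the unfolder in place. A self-contained sketch of that design shift, with hypothetical Engine and Repository types standing in for the Quest classes:
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class ListenerSketch {
    // Hypothetical stand-ins for RepositoryChangedListener and the SI repository.
    interface ChangeListener { void repositoryChanged(); }
    static final class Engine {
        private final List<String> mappings;
        Engine(List<String> mappings) { this.mappings = mappings; }
        // Immutable style: updated mappings yield a fresh engine instance.
        Engine changeMappings(List<String> newMappings) { return new Engine(newMappings); }
        List<String> mappings() { return mappings; }
    }
    static final class Repository {
        private final List<ChangeListener> listeners = new ArrayList<>();
        void addListener(ChangeListener l) { listeners.add(l); }
        void insert() { listeners.forEach(ChangeListener::repositoryChanged); }
    }
    private Engine engine = new Engine(Collections.emptyList());
    void wire(Repository dataRepository, List<String> freshMappings) {
        // As in the resolution: the callback swaps the engine reference
        // rather than mutating the old one in place.
        dataRepository.addListener(() -> engine = engine.changeMappings(freshMappings));
    }
}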
<<<<<<<
Term c1 = TERM_FACTORY.getConstantIRI(new SimpleRDF().createIRI("URI1"));
Term c2 = TERM_FACTORY.getRDFLiteralConstant("m", XSD.STRING);
=======
Term c1 = TERM_FACTORY.getConstantIRI(RDF_FACTORY.createIRI("urn:URI1"));
Term c2 = TERM_FACTORY.getConstantLiteral("m");
>>>>>>>
Term c1 = TERM_FACTORY.getConstantIRI(RDF_FACTORY.createIRI("urn:URI1"));
Term c2 = TERM_FACTORY.getRDFLiteralConstant("m", XSD.STRING); |
<<<<<<<
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.semanticweb.ontop.model.BNodePredicate;
import org.semanticweb.ontop.model.CQIE;
import org.semanticweb.ontop.model.DatalogProgram;
import org.semanticweb.ontop.model.Function;
import org.semanticweb.ontop.model.OBDADataFactory;
import org.semanticweb.ontop.model.OBDAException;
import org.semanticweb.ontop.model.Predicate;
import org.semanticweb.ontop.model.Term;
import org.semanticweb.ontop.model.URIConstant;
import org.semanticweb.ontop.model.URITemplatePredicate;
import org.semanticweb.ontop.model.ValueConstant;
import org.semanticweb.ontop.model.Variable;
import org.semanticweb.ontop.model.impl.*;
import org.semanticweb.ontop.ontology.BasicClassDescription;
import org.semanticweb.ontop.ontology.Property;
import org.semanticweb.ontop.owlrefplatform.core.EquivalenceMap;
=======
import org.semanticweb.ontop.model.*;
import org.semanticweb.ontop.model.impl.AnonymousVariable;
import org.semanticweb.ontop.model.impl.FunctionalTermImpl;
import org.semanticweb.ontop.model.impl.OBDADataFactoryImpl;
import org.semanticweb.ontop.model.impl.VariableImpl;
import org.semanticweb.ontop.ontology.*;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.VocabularyValidator;
>>>>>>>
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.semanticweb.ontop.model.BNodePredicate;
import org.semanticweb.ontop.model.CQIE;
import org.semanticweb.ontop.model.Function;
import org.semanticweb.ontop.model.OBDADataFactory;
import org.semanticweb.ontop.model.OBDAException;
import org.semanticweb.ontop.model.Predicate;
import org.semanticweb.ontop.model.Term;
import org.semanticweb.ontop.model.URIConstant;
import org.semanticweb.ontop.model.URITemplatePredicate;
import org.semanticweb.ontop.model.ValueConstant;
import org.semanticweb.ontop.model.Variable;
import org.semanticweb.ontop.model.impl.AnonymousVariable;
import org.semanticweb.ontop.model.impl.FunctionalTermImpl;
import org.semanticweb.ontop.model.impl.OBDADataFactoryImpl;
import org.semanticweb.ontop.model.impl.VariableImpl;
import org.semanticweb.ontop.ontology.*;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.VocabularyValidator; |
<<<<<<<
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDADataSource;
import it.unibz.inf.ontop.model.OBDAException;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.OBDAMappingListener;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.OBDAModelListener;
import it.unibz.inf.ontop.model.Predicate;
>>>>>>> v3/package-names-changed:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/core/OBDAModelManager.java
=======
import it.unibz.inf.ontop.model.*;
>>>>>>>
import it.unibz.inf.ontop.model.*;
<<<<<<<
=======
import it.unibz.inf.ontop.protege.utils.DialogUtils;
<<<<<<<
>>>>>>> v3/package-names-changed:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/core/OBDAModelManager.java
import it.unibz.inf.ontop.querymanager.QueryController;
import it.unibz.inf.ontop.querymanager.QueryControllerEntity;
import it.unibz.inf.ontop.querymanager.QueryControllerGroup;
import it.unibz.inf.ontop.querymanager.QueryControllerListener;
import it.unibz.inf.ontop.querymanager.QueryControllerQuery;
=======
import it.unibz.inf.ontop.querymanager.*;
import it.unibz.inf.ontop.sql.ImplicitDBConstraintsReader;
>>>>>>>
import it.unibz.inf.ontop.protege.utils.DialogUtils;
import it.unibz.inf.ontop.querymanager.*;
import it.unibz.inf.ontop.sql.ImplicitDBConstraintsReader;
<<<<<<<
import it.unibz.inf.ontop.sql.ImplicitDBConstraints;
<<<<<<< HEAD:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/core/OBDAModelManager.java
import java.io.*;
=======
import java.io.File;
import java.io.IOException;
>>>>>>> v3/package-names-changed:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/core/OBDAModelManager.java
import java.net.URI;
import java.util.*;
=======
>>>>>>>
<<<<<<<
private OWLEditorKit owlEditorKit;
=======
private final OWLEditorKit owlEditorKit;
>>>>>>>
private final OWLEditorKit owlEditorKit;
<<<<<<<
private Map<URI, OBDAModelWrapper> obdamodels;
=======
private final Map<URI, OBDAModel> obdamodels;
>>>>>>>
private final Map<URI, OBDAModelWrapper> obdamodels;
<<<<<<<
private static final OntologyFactory ofac = OntologyFactoryImpl.getInstance();
=======
private boolean applyUserConstraints = false;
private ImplicitDBConstraintsReader userConstraints;
>>>>>>>
private boolean applyUserConstraints = false;
private ImplicitDBConstraintsReader userConstraints;
<<<<<<<
obdaManagerListeners = new ArrayList<>();
obdamodels = new HashMap<>();
=======
obdaManagerListeners = new ArrayList<>();
obdamodels = new HashMap<>();
>>>>>>>
obdaManagerListeners = new ArrayList<>();
obdamodels = new HashMap<>();
<<<<<<<
IRI ontologyIRI = ontologyID.getOntologyIRI();
URI uri;
if (ontologyIRI != null) {
uri = ontologyIRI.toURI();
} else {
uri = URI.create(ontologyID.toString());
}
=======
Optional<IRI> optionalOntologyIRI = ontologyID.getOntologyIRI();
URI uri;
if(optionalOntologyIRI.isPresent()){
uri = optionalOntologyIRI.get().toURI();
} else {
uri = URI.create(ontologyID.toString());
}
>>>>>>>
Optional<IRI> optionalOntologyIRI = ontologyID.getOntologyIRI();
URI uri;
if(optionalOntologyIRI.isPresent()){
uri = optionalOntologyIRI.get().toURI();
} else {
uri = URI.create(ontologyID.toString());
}
<<<<<<<
initializing = true; // flag on
// Setting up a new OBDA model and retrieve the object.
setupNewOBDAModel();
reloadReasonerFactory();
fireActiveOBDAModelChange();
initializing = false; // flag off
=======
handleActiveOntologyChanged();
>>>>>>>
handleActiveOntologyChanged();
<<<<<<<
loadOntologyAndMappings(source, activeOntology);
=======
handleOntologyLoadedAndReLoaded(source, activeOntology);
>>>>>>>
handleOntologyLoadedAndReLoaded(source, activeOntology);
<<<<<<<
log.debug("ACTIVE ONTOLOGY SAVED");
saveOntologyAndMappings(source, activeOntology);
break;
=======
log.debug("ONTOLOGY SAVED");
handleOntologySaved(source, activeOntology);
break;
>>>>>>>
log.debug("ONTOLOGY SAVED");
handleOntologySaved(source, activeOntology);
break;
<<<<<<<
OWLClass newClass = owlmm.getOWLDataFactory().getOWLClass(IRI.create("http://www.unibz.it/krdb/obdaplugin#RandomClass" + UUID.randomUUID()));
=======
OWLClass newClass = owlmm.getOWLDataFactory().getOWLClass(IRI.create("http://www.unibz.it/inf/obdaplugin#RandomClass6677841155"));
>>>>>>>
OWLClass newClass = owlmm.getOWLDataFactory().getOWLClass(IRI.create("http://www.unibz.it/inf/obdaplugin#RandomClass6677841155")); |
<<<<<<<
=======
import eu.optique.api.mapping.R2RMLMappingManager;
import eu.optique.api.mapping.R2RMLMappingManagerFactory;
import eu.optique.api.mapping.TriplesMap;
import eu.optique.api.mapping.impl.sesame.SesameR2RMLMappingManagerFactory;
import it.unibz.inf.ontop.io.PrefixManager;
>>>>>>>
import eu.optique.api.mapping.R2RMLMappingManager;
import eu.optique.api.mapping.R2RMLMappingManagerFactory;
import eu.optique.api.mapping.TriplesMap;
import eu.optique.api.mapping.impl.sesame.SesameR2RMLMappingManagerFactory;
import it.unibz.inf.ontop.io.PrefixManager;
<<<<<<<
import it.unibz.inf.ontop.io.PrefixManager;
import java.io.*;
import java.net.URI;
import java.util.*;
=======
>>>>>>> |
<<<<<<<
import org.semanticweb.ontop.owlrefplatform.core.EquivalenceMap;
import org.semanticweb.ontop.owlrefplatform.core.EquivalenceMapImpl;
=======
>>>>>>>
<<<<<<<
TBoxReasoner reasoner = new TBoxReasonerImpl(ontologyClosure);
// this is used to simplify the vocabulary of ABox assertions and mappings
equivalenceMaps = EquivalenceMapImpl.getEquivalenceMap(reasoner);
=======
TBoxReasoner ontoReasoner = new TBoxReasonerImpl(ontologyClosure);
>>>>>>>
TBoxReasoner ontoReasoner = new TBoxReasonerImpl(ontologyClosure);
<<<<<<<
dataRepository = new RDBMSSIRepositoryManagerImpl(optimizedOntology.getVocabulary());
TBoxReasoner optimizedDag = new TBoxReasonerImpl(optimizedOntology);
dataRepository.setTBox(optimizedDag);
=======
dataRepository = new RDBMSSIRepositoryManager();
dataRepository.setTBox(reasoner);
>>>>>>>
dataRepository = new RDBMSSIRepositoryManager();
dataRepository.setTBox(reasoner); |
<<<<<<<
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
=======
>>>>>>>
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
<<<<<<<
import org.semanticweb.ontop.model.*;
=======
import org.semanticweb.ontop.io.SimplePrefixManager;
import org.semanticweb.ontop.model.CQIE;
import org.semanticweb.ontop.model.Function;
import org.semanticweb.ontop.model.OBDADataFactory;
import org.semanticweb.ontop.model.OBDADataSource;
import org.semanticweb.ontop.model.OBDAMappingAxiom;
import org.semanticweb.ontop.model.OBDAMappingListener;
import org.semanticweb.ontop.model.OBDAModel;
import org.semanticweb.ontop.model.OBDAModelListener;
import org.semanticweb.ontop.model.OBDAQuery;
import org.semanticweb.ontop.model.Predicate;
import org.semanticweb.ontop.ontology.DataPropertyExpression;
import org.semanticweb.ontop.ontology.OClass;
import org.semanticweb.ontop.ontology.ObjectPropertyExpression;
import org.semanticweb.ontop.ontology.OntologyVocabulary;
import org.semanticweb.ontop.querymanager.QueryController;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import org.semanticweb.ontop.model.*;
import org.semanticweb.ontop.ontology.DataPropertyExpression;
import org.semanticweb.ontop.ontology.OClass;
import org.semanticweb.ontop.ontology.ObjectPropertyExpression;
import org.semanticweb.ontop.ontology.OntologyVocabulary;
<<<<<<<
private final ImmutableMap<URI, ImmutableList<OBDAMappingAxiom>> mappingIndexByDataSource;
private final ImmutableMap<String, OBDAMappingAxiom> mappingIndexById;
=======
private QueryController queryController;
private PrefixManager prefixManager;
private HashMap<URI, OBDADataSource> datasources;
private ArrayList<OBDAModelListener> sourceslisteners;
private Hashtable<URI, ArrayList<OBDAMappingAxiom>> mappings;
private ArrayList<OBDAMappingListener> mappinglisteners;
private static OBDADataFactory dfac = OBDADataFactoryImpl.getInstance();
private static final Logger log = LoggerFactory.getLogger(OBDAModelImpl.class);
private final Set<OClass> declaredClasses = new LinkedHashSet<OClass>();
private final Set<ObjectPropertyExpression> declaredObjectProperties = new LinkedHashSet<ObjectPropertyExpression>();
private final Set<DataPropertyExpression> declaredDataProperties = new LinkedHashSet<DataPropertyExpression>();
>>>>>>>
private final ImmutableMap<URI, ImmutableList<OBDAMappingAxiom>> mappingIndexByDataSource;
private final ImmutableMap<String, OBDAMappingAxiom> mappingIndexById; |
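The resolution above drops the mutable Hashtable/ArrayList fields in favour of two immutable indexes, keyed by data-source URI and by mapping id. A small sketch of building such indexes with Guava (the URIs, mapping ids, and source queries are made up; String stands in for OBDAMappingAxiom):
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.net.URI;
public class MappingIndexSketch {
    public static void main(String[] args) {
        URI src = URI.create("urn:example:datasource"); // made-up data source URI
        ImmutableMap<URI, ImmutableList<String>> byDataSource =
                ImmutableMap.of(src, ImmutableList.of("mapping-1", "mapping-2"));
        ImmutableMap<String, String> byId = ImmutableMap.of(
                "mapping-1", "SELECT * FROM t1",   // made-up source queries
                "mapping-2", "SELECT * FROM t2");
        // Lookups stay O(1) and neither structure can be mutated after construction.
        System.out.println(byDataSource.get(src)); // [mapping-1, mapping-2]
        System.out.println(byId.get("mapping-1")); // SELECT * FROM t1
    }
}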
<<<<<<<
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TermType;
import it.unibz.inf.ontop.utils.EncodeForURI;
=======
import it.unibz.inf.ontop.utils.R2RMLIRISafeEncoder;
>>>>>>>
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.utils.R2RMLIRISafeEncoder;
<<<<<<<
String template = baseIRI + percentEncode(td.getID().getTableName()) + "/" + Joiner.on(";").join(attributes);
terms.add(termFactory.getConstantLiteral(template));
=======
String template = baseIRI + R2RMLIRISafeEncoder.encode(td.getID().getTableName()) + "/" + Joiner.on(";").join(attributes);
terms.add(df.getConstantLiteral(template));
>>>>>>>
String template = baseIRI + R2RMLIRISafeEncoder.encode(td.getID().getTableName()) + "/" + Joiner.on(";").join(attributes);
terms.add(termFactory.getConstantLiteral(template)); |
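Both sides of the record above build the same bootstrap IRI template; the merge keeps R2RMLIRISafeEncoder (IRI-safe percent-encoding) from the incoming branch together with the local branch's termFactory. A self-contained sketch of the template shape, using Guava's Joiner as the record does (the base IRI, table name, and attributes are made up, and String.replace stands in for R2RMLIRISafeEncoder.encode to keep the sketch dependency-light):
import com.google.common.base.Joiner;
import java.util.Arrays;
import java.util.List;
public class TemplateSketch {
    public static void main(String[] args) {
        String baseIRI = "http://example.org/"; // made-up base
        String tableName = "my table";          // made-up, needs encoding
        List<String> attributes = Arrays.asList("id={id}", "name={name}");
        // R2RMLIRISafeEncoder.encode would percent-encode the space below.
        String template = baseIRI + tableName.replace(" ", "%20")
                + "/" + Joiner.on(";").join(attributes);
        System.out.println(template); // http://example.org/my%20table/id={id};name={name}
    }
}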
<<<<<<<
public <T> T acceptVisitor(IQVisitor<T> visitor) {
return getRootNode().acceptVisitor(visitor, leftChild, rightChild);
}
@Override
public IQTree liftBinding(VariableGenerator variableGenerator) {
=======
public IQTree normalizeForOptimization(VariableGenerator variableGenerator) {
>>>>>>>
public <T> T acceptVisitor(IQVisitor<T> visitor) {
return getRootNode().acceptVisitor(visitor, leftChild, rightChild);
}
@Override
public IQTree normalizeForOptimization(VariableGenerator variableGenerator) { |
<<<<<<<
=======
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableTable;
import it.unibz.inf.ontop.exception.IncompatibleTermException;
import it.unibz.inf.ontop.model.term.*;
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.model.term.TermConstants;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate.COL_TYPE;
>>>>>>>
import it.unibz.inf.ontop.model.term.TermConstants;
<<<<<<<
=======
/**
* This table is not "ground truth" and deserves to be discussed (is it good enough or not?)
*/
private static final ImmutableTable<COL_TYPE, COL_TYPE, COL_TYPE> DATATYPE_DENOMINATORS = generateDatatypeDenominators();
private static ImmutableTable<COL_TYPE, COL_TYPE, COL_TYPE> generateDatatypeDenominators() {
// Child: Parent
Map<COL_TYPE, COL_TYPE> datatypeHierarchy = ImmutableMap.<COL_TYPE, COL_TYPE>builder()
.put(LANG_STRING, LITERAL)
.put(STRING, LITERAL)
.put(BOOLEAN, LITERAL)
.put(DATE, LITERAL)
.put(DATETIME, LITERAL)
.put(DATETIME_STAMP, DATETIME)
.put(TIME, LITERAL)
.put(YEAR, LITERAL)
.put(DOUBLE, LITERAL)
.put(FLOAT, DOUBLE) // Type promotion (https://www.w3.org/TR/xpath20/#dt-type-promotion)
.put(DECIMAL, FLOAT) // Type promotion
.put(INTEGER, DECIMAL) // Subtype substitution (https://www.w3.org/TR/xpath20/#dt-subtype-substitution)
.put(LONG, INTEGER) // Subtype substitution
.put(INT, LONG) // Subtype substitution
.put(NON_NEGATIVE_INTEGER, INTEGER) // Subtype substitution
.put(POSITIVE_INTEGER, NON_NEGATIVE_INTEGER) // Subtype substitution
.put(NON_POSITIVE_INTEGER, INTEGER) // Subtype substitution
.put(NEGATIVE_INTEGER, NON_POSITIVE_INTEGER) // Subtype substitution
.put(UNSIGNED_INT, NON_NEGATIVE_INTEGER) // Subtype substitution
.build();
ImmutableTable.Builder<COL_TYPE, COL_TYPE, COL_TYPE> saturatedHierarchyBuilder = ImmutableTable.builder();
datatypeHierarchy.forEach((child, parent) -> {
saturatedHierarchyBuilder.put(child, child, child);
// Non-final
COL_TYPE ancestor = parent;
// Transitive closure
while (ancestor != null) {
saturatedHierarchyBuilder.put(child, ancestor, ancestor);
saturatedHierarchyBuilder.put(ancestor, child, ancestor);
ancestor = datatypeHierarchy.get(ancestor);
}
});
ImmutableTable<COL_TYPE, COL_TYPE, COL_TYPE> saturatedHierarchy = saturatedHierarchyBuilder.build();
ImmutableTable.Builder<COL_TYPE, COL_TYPE, COL_TYPE> tableBuilder = ImmutableTable.<COL_TYPE, COL_TYPE, COL_TYPE>builder()
// Base COL_TYPES
.put(LITERAL, LITERAL, LITERAL)
.put(OBJECT, OBJECT, OBJECT)
.put(BNODE, BNODE, BNODE)
.put(NULL, NULL, NULL)
.put(UNSUPPORTED, UNSUPPORTED, UNSUPPORTED)
.putAll(saturatedHierarchy);
/**
* Other literal type combinations
*/
COL_TYPE.LITERAL_TYPES.stream().forEach(
type1 -> COL_TYPE.LITERAL_TYPES.stream().forEach(
type2 -> {
if ((!type1.equals(type2) && (!saturatedHierarchy.contains(type1, type2)))) {
tableBuilder.put(type1, type2, LITERAL);
}
}
)
);
return tableBuilder.build();
}
>>>>>>> |
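The block removed above computes pairwise datatype "denominators" by saturating a child-to-parent hierarchy with its transitive closure; related types resolve to the ancestor, and unrelated literal types fall back to LITERAL. A dependency-free sketch of the same lookup logic over a plain map, with a worked example (INT vs DECIMAL gives DECIMAL, because DECIMAL is an ancestor of INT in the record's hierarchy):
import java.util.HashMap;
import java.util.Map;
public class DenominatorSketch {
    public static void main(String[] args) {
        // Child -> parent, a small slice of the hierarchy in the record.
        Map<String, String> parent = new HashMap<>();
        parent.put("INT", "LONG");
        parent.put("LONG", "INTEGER");
        parent.put("INTEGER", "DECIMAL");
        parent.put("DECIMAL", "FLOAT");
        parent.put("FLOAT", "DOUBLE");
        parent.put("DOUBLE", "LITERAL");
        System.out.println(denominator(parent, "INT", "DECIMAL"));    // DECIMAL
        System.out.println(denominator(parent, "INT", "INT"));        // INT
        System.out.println(denominator(parent, "DOUBLE", "BOOLEAN")); // LITERAL
    }
    // Walks the ancestors of each argument; returns the other one if it is an
    // ancestor (or equal), otherwise collapses to LITERAL, as the record's
    // table does for unrelated literal types.
    static String denominator(Map<String, String> parent, String a, String b) {
        for (String x = a; x != null; x = parent.get(x))
            if (x.equals(b)) return b; // b is an ancestor of a (or equal)
        for (String x = b; x != null; x = parent.get(x))
            if (x.equals(a)) return a; // a is an ancestor of b
        return "LITERAL";
    }
}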
<<<<<<<
import java.util.HashMap;
=======
import java.util.Iterator;
import java.util.LinkedList;
>>>>>>>
<<<<<<<
import com.google.common.collect.ImmutableList;
=======
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
>>>>>>>
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
<<<<<<<
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.CQCUtilities;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.DatalogNormalizer;
import org.semanticweb.ontop.owlrefplatform.core.execution.TargetQueryExecutionException;
=======
import org.semanticweb.ontop.model.impl.OBDAVocabulary;
import org.semanticweb.ontop.ontology.Assertion;
import org.semanticweb.ontop.owlrefplatform.core.abox.EquivalentTriplePredicateIterator;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.*;
import org.semanticweb.ontop.owlrefplatform.core.optimization.BasicJoinOptimizer;
import org.semanticweb.ontop.owlrefplatform.core.optimization.BasicTypeLiftOptimizer;
>>>>>>>
import org.semanticweb.ontop.model.impl.OBDAVocabulary;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.DatalogNormalizer;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.FunctionFlattener;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.PullOutEqualityNormalizer;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.PullOutEqualityNormalizerImpl;
import org.semanticweb.ontop.owlrefplatform.core.execution.TargetQueryExecutionException;
import org.semanticweb.ontop.owlrefplatform.core.optimization.BasicJoinOptimizer;
<<<<<<<
import org.semanticweb.ontop.owlrefplatform.core.resultset.EmptyQueryResultSet;
import org.semanticweb.ontop.owlrefplatform.core.resultset.QuestGraphResultSet;
import org.semanticweb.ontop.owlrefplatform.core.resultset.QuestResultset;
import org.semanticweb.ontop.owlrefplatform.core.srcquerygeneration.NativeQueryGenerator;
=======
import org.semanticweb.ontop.owlrefplatform.core.resultset.*;
import org.semanticweb.ontop.owlrefplatform.core.srcquerygeneration.SQLQueryGenerator;
>>>>>>>
import org.semanticweb.ontop.owlrefplatform.core.resultset.EmptyQueryResultSet;
import org.semanticweb.ontop.owlrefplatform.core.resultset.QuestGraphResultSet;
import org.semanticweb.ontop.owlrefplatform.core.resultset.QuestResultset;
import org.semanticweb.ontop.owlrefplatform.core.srcquerygeneration.NativeQueryGenerator;
<<<<<<<
import com.google.common.collect.Multimap;
import javax.annotation.Nullable;
=======
>>>>>>>
import com.google.common.collect.Multimap;
import javax.annotation.Nullable;
<<<<<<<
this.translator = new SparqlAlgebraToDatalogTranslator(this.questInstance.getUriTemplateMatcher());
=======
>>>>>>>
<<<<<<<
@Override
public IQuest getQuestInstance() {
=======
public Quest getQuestInstance() {
>>>>>>>
@Override
public IQuest getQuestInstance() {
<<<<<<<
public ResultSet getResultSet() {
return resultSet;
=======
// TODO: replace the magic number by an enum
public void setQueryType(int type) {
switch (type) { // encoding of query type from numbers
case 1:
this.isSelect = true;
break;
case 2:
this.isBoolean = true;
break;
case 3:
this.isConstruct = true;
break;
case 4:
this.isDescribe = true;
break;
}
>>>>>>>
public ResultSet getResultSet() {
return resultSet;
<<<<<<<
log.debug("Executing query: \n{}", sparqlQuery);
=======
log.debug("Executing SPARQL query: \n{}", strquery);
>>>>>>>
log.debug("Executing SPARQL query: \n{}", sparqlQuery);
<<<<<<<
/**
* Extracts the target query from the cache or computes it.
*/
TargetQuery targetQuery = queryCache.getTargetQuery(sparqlQuery);
if (targetQuery == null) {
targetQuery = unfoldAndGenerateTargetQuery(sparqlQuery, constructTemplate);
}
/**
* Executes the target query.
*/
log.debug("Executing the query and get the result...");
try {
executingTargetQuery = true;
switch (queryType) {
case ASK:
resultSet = executeBooleanQuery(targetQuery);
break;
case SELECT:
resultSet = executeSelectQuery(targetQuery);
break;
case CONSTRUCT:
resultSet = executeConstructQuery(targetQuery);
break;
case DESCRIBE:
resultSet = executeDescribeQuery(targetQuery);
break;
}
} catch (TargetQueryExecutionException e) {
=======
if (!questInstance.hasCachedSQL(strquery)) {
getUnfolding(strquery);
}
// Obtaining the query from the cache
String sql = questInstance.getCachedSQL(strquery);
List<String> signature = questInstance.getSignatureCache().get(strquery);
//ParsedQuery query = sesameQueryCache.get(strquery);
log.debug("Executing the SQL query and get the result...");
if (sql.equals("") && !isBoolean) {
tupleResult = new EmptyQueryResultSet(signature, QuestStatement.this);
}
else if (sql.equals("")) {
tupleResult = new BooleanOWLOBDARefResultSet(false, QuestStatement.this);
}
else {
try {
// FOR debugging H2 in-memory database
// try {
// org.h2.tools.Server.startWebServer(conn.getConnection());
// } catch (SQLException e) {
// e.printStackTrace();
// }
// Execute the SQL query string
executingSQL = true;
ResultSet set = null;
// try {
// FOR debugging H2 in-memory database
// org.h2.tools.Server.startWebServer(conn.conn);
set = sqlstatement.executeQuery(sql);
// Store the SQL result to application result set.
if (isSelect) { // is tuple-based results
if(questInstance.getDatasourceQueryGenerator().hasDistinctResultSet()) {
tupleResult = new QuestDistinctResultset(set, signature, QuestStatement.this );
}
else {
tupleResult = new QuestResultset(set, signature, QuestStatement.this);
}
} else if (isBoolean) {
tupleResult = new BooleanOWLOBDARefResultSet(set, QuestStatement.this);
} else if (isConstruct || isDescribe) {
boolean collectResults = false;
if (isDescribe)
collectResults = true;
//Template template = query.getConstructTemplate();
TupleResultSet tuples = null;
tuples = new QuestResultset(set, signature, QuestStatement.this);
graphResult = new QuestGraphResultSet(tuples, templ, collectResults);
}
} catch (SQLException e) {
>>>>>>>
/**
* Extracts the target query from the cache or computes it.
*/
TargetQuery targetQuery = queryCache.getTargetQuery(sparqlQuery);
if (targetQuery == null) {
targetQuery = unfoldAndGenerateTargetQuery(sparqlQuery, constructTemplate);
}
/**
* Executes the target query.
*/
log.debug("Executing the query and get the result...");
try {
executingTargetQuery = true;
switch (queryType) {
case ASK:
resultSet = executeBooleanQuery(targetQuery);
break;
case SELECT:
resultSet = executeSelectQuery(targetQuery);
break;
case CONSTRUCT:
resultSet = executeConstructQuery(targetQuery);
break;
case DESCRIBE:
resultSet = executeDescribeQuery(targetQuery);
break;
}
} catch (TargetQueryExecutionException e) {
<<<<<<<
SesameConstructTemplate constructTemplate;
=======
>>>>>>>
SesameConstructTemplate constructTemplate;
<<<<<<<
String selectSparqlQuery = SPARQLQueryUtility.getSelectFromConstruct(strquery);
ResultSet resultSet = executeInThread(selectSparqlQuery, QueryType.CONSTRUCT, constructTemplate);
return resultSet;
=======
strquery = SPARQLQueryUtility.getSelectFromConstruct(strquery);
GraphResultSet executedGraphQuery = executeGraphQuery(strquery, 3);
return executedGraphQuery;
>>>>>>>
String selectSparqlQuery = SPARQLQueryUtility.getSelectFromConstruct(strquery);
ResultSet resultSet = executeInThread(selectSparqlQuery, QueryType.CONSTRUCT, constructTemplate);
return resultSet;
<<<<<<<
ImmutableList<String> signatureContainer = translator.getSignature(query);
=======
//SparqlAlgebraToDatalogTranslator translator = questInstance.getSparqlAlgebraToDatalogTranslator();
//List<String> signatureContainer = translator.getSignature(query);
>>>>>>>
ImmutableList<String> signatureContainer = translator.getSignature(query);
//SparqlAlgebraToDatalogTranslator translator = questInstance.getSparqlAlgebraToDatalogTranslator();
//List<String> signatureContainer = translator.getSignature(query);
<<<<<<<
@Override
public String getRewriting(ParsedQuery query, List<String> signature) throws OBDAException {
=======
public String getRewriting(ParsedQuery query) throws Exception {
>>>>>>>
@Override
public String getRewriting(ParsedQuery query) throws OBDAException {
<<<<<<<
@Override
public TargetQuery unfoldAndGenerateTargetQuery(String sparqlQuery) throws OBDAException {
return unfoldAndGenerateTargetQuery(sparqlQuery, null);
}
=======
public String getUnfolding(String strquery) throws Exception {
String sql = "";
// Check the cache first if the system has processed the query string
// before
if (ALLOW_QUERY_CACHING && questInstance.hasCachedSQL(strquery)) {
// Obtain immediately the SQL string from cache
sql = questInstance.getCachedSQL(strquery);
//signatureContainer = signaturecache.get(strquery);
//query = sesameQueryCache.get(strquery);
}
else {
>>>>>>>
@Override
public TargetQuery unfoldAndGenerateTargetQuery(String sparqlQuery) throws OBDAException {
return unfoldAndGenerateTargetQuery(sparqlQuery, null);
}
<<<<<<<
=======
sql = getSQL(programAfterUnfolding, signatureContainer);
// cacheQueryAndProperties(strquery, sql);
questInstance.cacheSQL(strquery, sql);
}
catch (Exception e1) {
log.debug(e1.getMessage(), e1);
>>>>>>>
<<<<<<<
targetQuery = generateTargetQuery(programAfterUnfolding, ImmutableList.copyOf(signatureContainer), constructTemplate);
queryCache.cacheTargetQuery(sparqlQuery, targetQuery);
=======
/**
* Returns the number of tuples returned by the query
*/
public long getTupleCount(String query) throws Exception {
String unf = getUnfolding(query);
String newsql = "SELECT count(*) FROM (" + unf + ") t1";
if (!canceled) {
ResultSet set = sqlstatement.executeQuery(newsql);
if (set.next()) {
return set.getLong(1);
} else {
throw new Exception("Tuple count failed due to empty result set.");
}
} else {
throw new Exception("Action canceled.");
>>>>>>>
targetQuery = generateTargetQuery(programAfterUnfolding, signatureContainer, constructTemplate);
queryCache.cacheTargetQuery(sparqlQuery, targetQuery);
<<<<<<<
=======
@Override
public int getFetchSize() throws OBDAException {
try {
return sqlstatement.getFetchSize();
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public int getMaxRows() throws OBDAException {
try {
return sqlstatement.getMaxRows();
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public void getMoreResults() throws OBDAException {
try {
sqlstatement.getMoreResults();
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public void setFetchSize(int rows) throws OBDAException {
try {
sqlstatement.setFetchSize(rows);
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public void setMaxRows(int max) throws OBDAException {
try {
sqlstatement.setMaxRows(max);
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public void setQueryTimeout(int seconds) throws OBDAException {
try {
sqlstatement.setQueryTimeout(seconds);
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public TupleResultSet getResultSet() throws OBDAException {
return null;
}
@Override
public int getQueryTimeout() throws OBDAException {
try {
return sqlstatement.getQueryTimeout();
} catch (Exception e) {
throw new OBDAException(e);
}
}
@Override
public boolean isClosed() throws OBDAException {
try {
return sqlstatement.isClosed();
} catch (Exception e) {
throw new OBDAException(e);
}
}
>>>>>>> |
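Both generations of the statement code above follow the same check-then-compute cache discipline: look the query string up (hasCachedSQL/getCachedSQL in the old code, queryCache.getTargetQuery in the new), translate only on a miss, then store the result (cacheSQL/cacheTargetQuery). A dependency-free sketch of the pattern, with a hypothetical translate function standing in for the SPARQL-to-SQL step:
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
public class QueryCacheSketch {
    private final Map<String, String> cache = new HashMap<>();
    private final Function<String, String> translate; // hypothetical SPARQL -> SQL step
    QueryCacheSketch(Function<String, String> translate) { this.translate = translate; }
    // Check-then-compute, as in both generations of the record above.
    String getUnfolding(String sparql) {
        String sql = cache.get(sparql);
        if (sql == null) {          // miss: translate once...
            sql = translate.apply(sparql);
            cache.put(sparql, sql); // ...and remember the result
        }
        return sql;
    }
}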
<<<<<<<
import it.unibz.inf.ontop.model.atom.AtomFactory;
=======
import it.unibz.inf.ontop.spec.mapping.parser.impl.TurtleOBDAParser.*;
>>>>>>>
import it.unibz.inf.ontop.model.atom.AtomFactory;
<<<<<<<
import it.unibz.inf.ontop.model.vocabulary.XSD;
import it.unibz.inf.ontop.spec.mapping.parser.impl.TurtleOBDAParser.*;
=======
>>>>>>>
import it.unibz.inf.ontop.model.vocabulary.XSD;
import it.unibz.inf.ontop.spec.mapping.parser.impl.TurtleOBDAParser.*;
<<<<<<<
Term stringValue = visitStringLiteral(ctx.stringLiteral());
IRI iriRef = visitIri(ctx.iri());
return termFactory.getTypedTerm(stringValue, iriRef);
=======
Term stringValue = visitLitString(ctx.litString());
String iriRef = visitIri(ctx.iri());
Optional<COL_TYPE> type = TYPE_FACTORY.getDatatype(iriRef);
if (type.isPresent()) {
return TERM_FACTORY.getTypedTerm(stringValue, type.get());
}
throw new RuntimeException("Unsupported datatype: " + iriRef);
>>>>>>>
Term stringValue = visitLitString(ctx.litString());
IRI iriRef = visitIri(ctx.iri());
return termFactory.getTypedTerm(stringValue, iriRef);
<<<<<<<
public Term visitBooleanLiteral(BooleanLiteralContext ctx) {
return typeTerm(ctx.BOOLEAN_LITERAL().getText(), XSD.BOOLEAN);
=======
public Term visitUntypedBooleanLiteral(UntypedBooleanLiteralContext ctx) {
return typeTerm(ctx.BOOLEAN_LITERAL().getText(), COL_TYPE.BOOLEAN);
>>>>>>>
public Term visitUntypedBooleanLiteral(UntypedBooleanLiteralContext ctx) {
return typeTerm(ctx.BOOLEAN_LITERAL().getText(), XSD.BOOLEAN); |
<<<<<<<
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLConfiguration;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLFactory;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLConfiguration;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLFactory;
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLFactory;
=======
>>>>>>>
<<<<<<<
import java.util.Properties;
import static org.junit.Assert.assertFalse;
=======
import java.sql.Connection;
>>>>>>>
<<<<<<<
Properties p = new Properties();
p.put(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.put(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.put(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
>>>>>>>
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.nativeOntopMappingFile(new File(obdaFile))
.build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
}
@Test(expected = ReasonerInternalException.class)
public void testWrongMappings() throws Exception { |
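Several records in this section resolve toward the same builder-based setup: a QuestOWLConfiguration is assembled once, from an OBDA model plus preferences or directly from a mapping file, and handed to QuestOWLFactory.createReasoner. A condensed sketch using only calls that appear in the records (the OWLOntology argument comes from the OWLAPI loading code shown earlier):
import java.io.File;
import org.semanticweb.owlapi.model.OWLOntology;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLConfiguration;
import it.unibz.inf.ontop.owlrefplatform.owlapi.QuestOWLFactory;
public class ReasonerSetupSketch {
    // Builds a reasoner from a native Ontop mapping file, as in the resolution above.
    static QuestOWL fromMappingFile(OWLOntology ontology, String obdaFile) {
        QuestOWLFactory factory = new QuestOWLFactory();
        QuestOWLConfiguration config = QuestOWLConfiguration.builder()
                .nativeOntopMappingFile(new File(obdaFile))
                .build();
        return factory.createReasoner(ontology, config);
    }
}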
<<<<<<<
@Override
public DatalogProgram getDatalogProgram(Collection<CQIE> rules) {
DatalogProgram p = new DatalogProgramImpl();
p.appendRule(rules);
return p;
}
=======
>>>>>>>
<<<<<<<
public Expression getFunctionSubstring(Term term1, Term term2) {
return getExpression(ExpressionOperation.SUBSTR, term1, term2);
}
@Override
=======
>>>>>>> |
<<<<<<<
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.ParserConfig;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.BasicParserSettings;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RDB2RDFManifestUtils {
static final Logger logger = LoggerFactory.getLogger(RDB2RDFManifestUtils.class);
public static TestSuite suite(RDB2RDFScenarioParent.Factory factory) throws Exception {
final String manifestFile;
final File tmpDir;
URL url = RDB2RDFManifestUtils.class.getResource(factory.getMainManifestFile());
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("scenario-evaluation");
JarURLConnection con = (JarURLConnection) url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
} catch (IOException e) {
throw new AssertionError(e);
}
} else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
} finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
} catch (IOException e) {
System.err
.println("Unable to clean up temporary directory '"
+ tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, url, url.toString());
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://obda.org/quest/tests/test-manifest#>, "
+ " qt = <http://obda.org/quest/tests/test-query#>";
TupleQueryResult manifestResults = (con.prepareTupleQuery(
QueryLanguage.SERQL, query, manifestFile)).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile")
.toString();
suite.addTest(RDB2RDFScenarioParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with "
+ suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI,
Resource... contexts) throws IOException, RepositoryException,
RDFParseException {
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE, vf);
ParserConfig config = rdfParser.getParserConfig();
// To emulate DatatypeHandling.IGNORE
config.addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES);
config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
config.addNonFatalError(BasicParserSettings.NORMALIZE_DATATYPE_VALUES);
// rdfParser.setVerifyData(false);
// rdfParser.setStopAtFirstError(true);
// rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
con.begin();
try {
rdfParser.parse(in, baseURI);
} catch (RDFHandlerException e) {
con.rollback();
// RDFInserter only throws wrapped RepositoryExceptions
throw (new RepositoryException(e.getCause()));
} catch (RuntimeException e) {
con.rollback();
throw e;
}
} finally {
in.close();
}
}
}
=======
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.turtle.TurtleParser;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RDB2RDFManifestUtils {
static final Logger logger = LoggerFactory.getLogger(RDB2RDFManifestUtils.class);
public static TestSuite suite(RDB2RDFScenarioParent.Factory factory) throws Exception {
final String manifestFile;
final File tmpDir;
URL url = RDB2RDFManifestUtils.class.getResource(factory.getMainManifestFile());
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("scenario-evaluation");
JarURLConnection con = (JarURLConnection) url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
} catch (IOException e) {
throw new AssertionError(e);
}
} else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
} finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
} catch (IOException e) {
System.err
.println("Unable to clean up temporary directory '"
+ tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, url, url.toString());
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://obda.org/quest/tests/test-manifest#>, "
+ " qt = <http://obda.org/quest/tests/test-query#>";
TupleQueryResult manifestResults = con.prepareTupleQuery(
QueryLanguage.SERQL, query, manifestFile).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile")
.toString();
suite.addTest(RDB2RDFScenarioParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with "
+ suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI,
Resource... contexts) throws IOException, RepositoryException,
RDFParseException {
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = new TurtleParser();
rdfParser.setValueFactory(vf);
rdfParser.setVerifyData(false);
rdfParser.setStopAtFirstError(true);
rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
boolean autoCommit = con.isAutoCommit();
con.setAutoCommit(false);
try {
rdfParser.parse(in, baseURI);
} catch (RDFHandlerException e) {
if (autoCommit) {
con.rollback();
}
// RDFInserter only throws wrapped RepositoryExceptions
throw (RepositoryException) e.getCause();
} catch (RuntimeException e) {
if (autoCommit) {
con.rollback();
}
throw e;
} finally {
con.setAutoCommit(autoCommit);
}
} finally {
in.close();
}
}
}
>>>>>>>
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.ParserConfig;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.BasicParserSettings;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RDB2RDFManifestUtils {
static final Logger logger = LoggerFactory.getLogger(RDB2RDFManifestUtils.class);
public static TestSuite suite(RDB2RDFScenarioParent.Factory factory) throws Exception {
final String manifestFile;
final File tmpDir;
URL url = RDB2RDFManifestUtils.class.getResource(factory.getMainManifestFile());
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("scenario-evaluation");
JarURLConnection con = (JarURLConnection) url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
} catch (IOException e) {
throw new AssertionError(e);
}
} else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
} finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
} catch (IOException e) {
System.err
.println("Unable to clean up temporary directory '"
+ tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, url, url.toString());
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://obda.org/quest/tests/test-manifest#>, "
+ " qt = <http://obda.org/quest/tests/test-query#>";
TupleQueryResult manifestResults = con.prepareTupleQuery(
QueryLanguage.SERQL, query, manifestFile).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile")
.toString();
suite.addTest(RDB2RDFScenarioParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with "
+ suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI,
Resource... contexts) throws IOException, RepositoryException,
RDFParseException {
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE, vf);
ParserConfig config = rdfParser.getParserConfig();
// To emulate DatatypeHandling.IGNORE
config.addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES);
config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
config.addNonFatalError(BasicParserSettings.NORMALIZE_DATATYPE_VALUES);
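// With these three settings registered as non-fatal, datatype problems are reported to the
// error listener but no longer abort parsing — the closest ParserConfig equivalent of the
// removed DatatypeHandling.IGNORE mode.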
// rdfParser.setVerifyData(false);
// rdfParser.setStopAtFirstError(true);
// rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
con.begin();
try {
rdfParser.parse(in, baseURI);
// commit only on success; failures are rolled back in the catch blocks below
con.commit();
} catch (RDFHandlerException e) {
con.rollback();
// RDFInserter only throws wrapped RepositoryExceptions
throw new RepositoryException(e.getCause());
} catch (RuntimeException e) {
con.rollback();
throw e;
}
} finally {
in.close();
}
}
} |
<<<<<<<
import java.io.*;
=======
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
>>>>>>>
import java.io.*;
<<<<<<<
import org.semanticweb.ontop.owlapi3.OBDAModelSynchronizer;
import org.semanticweb.ontop.owlapi3.OWLAPI3Translator;
import org.semanticweb.ontop.r2rml.R2RMLMappingParser;
=======
import org.semanticweb.ontop.owlapi3.OWLAPI3TranslatorUtility;
import org.semanticweb.ontop.r2rml.R2RMLReader;
>>>>>>>
import org.semanticweb.ontop.owlapi3.OWLAPI3TranslatorUtility;
import org.semanticweb.ontop.r2rml.R2RMLMappingParser; |
<<<<<<<
TARGET_QUERY_PARSER_FACTORY = injector.getInstance(TargetQueryParserFactory.class);
=======
CORE_UTILS_FACTORY = injector.getInstance(CoreUtilsFactory.class);
>>>>>>>
TARGET_QUERY_PARSER_FACTORY = injector.getInstance(TargetQueryParserFactory.class);
CORE_UTILS_FACTORY = injector.getInstance(CoreUtilsFactory.class); |
<<<<<<<
import org.semanticweb.ontop.mapping.QueryUtils;
=======
import org.semanticweb.ontop.model.impl.VariableImpl;
import org.semanticweb.ontop.utils.QueryUtils;
>>>>>>>
import org.semanticweb.ontop.mapping.QueryUtils;
import org.semanticweb.ontop.model.impl.VariableImpl; |
<<<<<<<
import java.io.File;
import java.util.List;
import java.util.Properties;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLConnection;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLFactory;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLStatement;
import junit.framework.TestCase;
import org.junit.Assert;
import org.junit.Test;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
=======
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
import org.junit.*;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
import org.junit.*;
<<<<<<<
public class OWLConstructDescribeTest extends TestCase{
OWLOntology ontology = null;
=======
@Ignore // GUOHUI: 2016-01-16 SI+Mapping mode is disabled
public class OWLConstructDescribeTest{
OWLOntology ontology = null;
OBDAModel obdaModel = null;
>>>>>>>
@Ignore // GUOHUI: 2016-01-16 SI+Mapping mode is disabled
public class OWLConstructDescribeTest{
OWLOntology ontology = null;
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
p.setProperty(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setProperty(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
p.setProperty(QuestPreferences.OBTAIN_FROM_MAPPINGS, "false");
p.setProperty(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
p.setProperty(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
p.setProperty(QuestPreferences.STORAGE_LOCATION, QuestConstants.INMEMORY);
p.setProperty(QuestPreferences.REWRITE, "false");
p.setProperty(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
QuestPreferences preferences = new QuestPreferences(p);
QuestOWLFactory factory = new QuestOWLFactory(preferences);
reasoner = (QuestOWL) factory.createReasoner(ontology, new SimpleConfiguration());
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setCurrentValueOf(QuestPreferences.OBTAIN_FROM_MAPPINGS, "false");
p.setCurrentValueOf(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
p.setCurrentValueOf(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
p.setCurrentValueOf(QuestPreferences.STORAGE_LOCATION, QuestConstants.INMEMORY);
p.setCurrentValueOf(QuestPreferences.REWRITE, "false");
p.setCurrentValueOf(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
QuestOWLFactory factory = new QuestOWLFactory();
// factory.setOBDAController(obdaModel);
//factory.setPreferenceHolder(p);
//reasoner.setPreferences(preferences);
//reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).build();
reasoner = factory.createReasoner(ontology, config);
>>>>>>>
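// Resolution: the preference wiring from both branches is dropped; since the SI+Mapping
// mode is disabled (@Ignore above), the reasoner is built from a plain default
// QuestOWLConfiguration without an obdaModel.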
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().build();
reasoner = factory.createReasoner(ontology, config); |
<<<<<<<
@Override
protected BeanSerializerBase withIgnorals(Set<String> toIgnore, Set<String> toInclude) {
=======
@Override // @since 2.12
protected BeanSerializerBase withByNameInclusion(Set<String> toIgnore, Set<String> toInclude) {
>>>>>>>
@Override
protected BeanSerializerBase withByNameInclusion(Set<String> toIgnore, Set<String> toInclude) { |
<<<<<<<
private final DatalogRule2QueryConverter datalogRuleConverter;
=======
private final RootConstructionNodeEnforcer rootCnEnforcer;
>>>>>>>
private final RootConstructionNodeEnforcer rootCnEnforcer;
private final DatalogRule2QueryConverter datalogRuleConverter;
<<<<<<<
UnionBasedQueryMerger queryMerger,
DatalogRule2QueryConverter datalogRuleConverter) {
=======
UnionBasedQueryMerger queryMerger,
RootConstructionNodeEnforcer rootCnEnforcer) {
>>>>>>>
UnionBasedQueryMerger queryMerger,
RootConstructionNodeEnforcer rootCnEnforcer,
DatalogRule2QueryConverter datalogRuleConverter) {
<<<<<<<
this.datalogRuleConverter = datalogRuleConverter;
=======
this.rootCnEnforcer = rootCnEnforcer;
>>>>>>>
this.rootCnEnforcer = rootCnEnforcer;
this.datalogRuleConverter = datalogRuleConverter; |
<<<<<<<
String sqlQuery = extractSQLQuery(executableQuery);
ConstructionNode constructionNode = extractRootConstructionNode(executableQuery);
ImmutableList<Variable> signature = extractSignature(executableQuery);
try {
java.sql.ResultSet set = sqlStatement.executeQuery(sqlQuery);
return settings.isDistinctPostProcessingEnabled()
? new SQLDistinctTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory)
: new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory);
} catch (SQLException e) {
throw new OntopQueryEvaluationException(e);
}
} catch (EmptyQueryException e) {
return new EmptyTupleResultSet(extractSignature(executableQuery));
=======
java.sql.ResultSet set = sqlStatement.executeQuery(sqlQuery);
return settings.isDistinctPostProcessingEnabled()
? new SQLDistinctTupleResultSet(set, executableQuery.getSignature(), dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory)
: new DelegatedIriSQLTupleResultSet(set, executableQuery.getSignature(), dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory);
} catch (SQLException e) {
throw new OntopQueryEvaluationException(e);
>>>>>>>
String sqlQuery = extractSQLQuery(executableQuery);
ConstructionNode constructionNode = extractRootConstructionNode(executableQuery);
ImmutableList<Variable> signature = extractSignature(executableQuery);
try {
java.sql.ResultSet set = sqlStatement.executeQuery(sqlQuery);
return settings.isDistinctPostProcessingEnabled()
? new SQLDistinctTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory)
: new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory);
} catch (SQLException e) {
throw new OntopQueryEvaluationException(e);
}
} catch (EmptyQueryException e) {
return new EmptyTupleResultSet(extractSignature(executableQuery));
<<<<<<<
tuples = new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata,
iriDictionary, termFactory, typeFactory);
=======
tuples = new DelegatedIriSQLTupleResultSet(set, executableQuery.getSignature(), dbMetadata,
iriDictionary, termFactory, typeFactory, rdfFactory);
>>>>>>>
tuples = new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata,
iriDictionary, termFactory, typeFactory, rdfFactory); |
<<<<<<<
import it.unibz.inf.ontop.spec.ontology.impl.OntologyFactoryImpl;
=======
import it.unibz.inf.ontop.spec.ontology.impl.ClassifiedTBoxImpl;
import it.unibz.inf.ontop.spec.ontology.impl.OntologyBuilderImpl;
import it.unibz.inf.ontop.datalog.impl.CQCUtilities;
>>>>>>>
import it.unibz.inf.ontop.spec.ontology.impl.OntologyBuilderImpl;
<<<<<<<
LinearInclusionDependencies dep = INCLUSION_DEPENDENCY_TOOLS.getABoxDependencies(TBoxReasonerImpl.create(sigma), false);
=======
LinearInclusionDependencies dep = LinearInclusionDependencyTools.getABoxDependencies(sigma, false);
>>>>>>>
LinearInclusionDependencies dep = INCLUSION_DEPENDENCY_TOOLS.getABoxDependencies(sigma, false);
<<<<<<<
LinearInclusionDependencies dep = INCLUSION_DEPENDENCY_TOOLS.getABoxDependencies(TBoxReasonerImpl.create(sigma), false);
CQContainmentCheckUnderLIDs cqc = new CQContainmentCheckUnderLIDs(dep, DATALOG_FACTORY, UNIFIER_UTILITIES,
SUBSTITUTION_UTILITIES, TERM_FACTORY);
=======
LinearInclusionDependencies dep = LinearInclusionDependencyTools.getABoxDependencies(sigma, false);
CQContainmentCheckUnderLIDs cqc = new CQContainmentCheckUnderLIDs(dep);
>>>>>>>
LinearInclusionDependencies dep = INCLUSION_DEPENDENCY_TOOLS.getABoxDependencies(sigma, false);
CQContainmentCheckUnderLIDs cqc = new CQContainmentCheckUnderLIDs(dep, DATALOG_FACTORY, UNIFIER_UTILITIES,
SUBSTITUTION_UTILITIES, TERM_FACTORY); |
<<<<<<<
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier.ExpressionAndSubstitution;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier;
import it.unibz.inf.ontop.iq.node.normalization.InnerJoinNormalizer;
import it.unibz.inf.ontop.iq.node.normalization.impl.JoinLikeChildBindingLifter;
import it.unibz.inf.ontop.iq.transform.IQTransformer;
=======
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
>>>>>>>
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier.ExpressionAndSubstitution;
import it.unibz.inf.ontop.iq.node.normalization.ConditionSimplifier;
import it.unibz.inf.ontop.iq.node.normalization.InnerJoinNormalizer;
import it.unibz.inf.ontop.iq.node.normalization.impl.JoinLikeChildBindingLifter;
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer; |
<<<<<<<
import it.unibz.inf.ontop.spec.ontology.TBoxReasoner;
=======
import it.unibz.inf.ontop.datalog.LinearInclusionDependencies;
import it.unibz.inf.ontop.spec.ontology.ClassifiedTBox;
>>>>>>>
import it.unibz.inf.ontop.datalog.LinearInclusionDependencies;
import it.unibz.inf.ontop.spec.ontology.ClassifiedTBox;
<<<<<<<
ImmutableSet<CQIE> saturatedMappingRules = tMappingProcessor.getTMappings(initialMappingRules, saturatedTBox,
true,
=======
ImmutableSet<CQIE> saturatedMappingRules = TMappingProcessor.getTMappings(initialMappingRules, saturatedTBox,
>>>>>>>
ImmutableSet<CQIE> saturatedMappingRules = tMappingProcessor.getTMappings(initialMappingRules, saturatedTBox, |
<<<<<<<
private class TMappingIndexEntry implements Iterable<TMappingRule> {
=======
private static final boolean noCQC = false;
// TODO: the implementation of EXCLUDE ignores equivalent classes / properties
private static class TMappingIndexEntry implements Iterable<TMappingRule> {
>>>>>>>
// TODO: the implementation of EXCLUDE ignores equivalent classes / properties
private class TMappingIndexEntry implements Iterable<TMappingRule> {
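// Kept as a non-static inner class (left branch): the merged code relies on injected
// instance members such as atomFactory and termFactory rather than the static helpers
// of the right branch.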
<<<<<<<
AtomPredicate currentPredicate = atomFactory.getObjectPropertyPredicate(current.getIRI());
TMappingIndexEntry currentNodeMappings = getMappings(mappingIndex, currentPredicate);
=======
Predicate currentPredicate = representative.getPredicate();
TMappingIndexEntry currentNodeMappings = getMappings(mappingIndex, currentPredicate);
>>>>>>>
Predicate currentPredicate = atomFactory.getObjectPropertyPredicate(representative.getIRI());
TMappingIndexEntry currentNodeMappings = getMappings(mappingIndex, currentPredicate);
<<<<<<<
boolean requiresInverse = childproperty.isInverse();
AtomPredicate childPredicate = atomFactory.getObjectPropertyPredicate(childproperty.getIRI());
List<TMappingRule> childmappings = originalMappings.get(childPredicate);
=======
List<TMappingRule> childmappings = originalMappings.get(childproperty.getPredicate());
>>>>>>>
List<TMappingRule> childmappings = originalMappings.get(
atomFactory.getObjectPropertyPredicate(childproperty.getIRI()));
<<<<<<<
Function newMappingHead;
if (!requiresInverse) {
if (!full)
continue;
newMappingHead = termFactory.getFunction(currentPredicate, terms);
}
else {
newMappingHead = termFactory.getFunction(currentPredicate, terms.get(1), terms.get(0));
}
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping, datalogFactory, termFactory, eqNormalizer);
=======
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping);
>>>>>>>
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping, datalogFactory,
termFactory, eqNormalizer);
<<<<<<<
Predicate p = atomFactory.getObjectPropertyPredicate(equivProperty.getIRI());
// skip the property and its inverse (if it is symmetric)
if (p.equals(current.getIRI()))
continue;
TMappingIndexEntry equivalentPropertyMappings = getMappings(mappingIndex, p);
for (TMappingRule currentNodeMapping : currentNodeMappings) {
List<Term> terms = currentNodeMapping.getHeadTerms();
Function newhead;
if (!equivProperty.isInverse())
newhead = termFactory.getFunction(p, terms);
else
newhead = termFactory.getFunction(p, terms.get(1), terms.get(0));
TMappingRule newrule = new TMappingRule(newhead, currentNodeMapping, datalogFactory, termFactory, eqNormalizer);
equivalentPropertyMappings.mergeMappingsWithCQC(newrule);
}
=======
if (!equivProperty.isInverse())
setMappings(mappingIndex, equivProperty.getPredicate(), currentNodeMappings);
>>>>>>>
if (!equivProperty.isInverse())
setMappings(mappingIndex, atomFactory.getObjectPropertyPredicate(equivProperty.getIRI()), currentNodeMappings);
<<<<<<<
Predicate p = atomFactory.getDataPropertyPredicate(equivProperty.getIRI());
// skip the property and its inverse (if it is symmetric)
if (p.equals(currentPredicate))
continue;
TMappingIndexEntry equivalentPropertyMappings = getMappings(mappingIndex, p);
for (TMappingRule currentNodeMapping : currentNodeMappings) {
Function newhead = termFactory.getFunction(p, currentNodeMapping.getHeadTerms());
TMappingRule newrule = new TMappingRule(newhead, currentNodeMapping, datalogFactory, termFactory, eqNormalizer);
equivalentPropertyMappings.mergeMappingsWithCQC(newrule);
}
=======
setMappings(mappingIndex, equivProperty.getPredicate(), currentNodeMappings);
>>>>>>>
setMappings(mappingIndex, atomFactory.getDataPropertyPredicate(equivProperty.getIRI()), currentNodeMappings);
<<<<<<<
public List<CQIE> getTMappings(List<CQIE> originalMappings, TBoxReasoner reasoner, boolean full, CQContainmentCheckUnderLIDs cqc, TMappingExclusionConfig excludeFromTMappings) {
=======
public static List<CQIE> getTMappings(List<CQIE> originalMappings, ClassifiedTBox reasoner, CQContainmentCheckUnderLIDs cqc, TMappingExclusionConfig excludeFromTMappings) {
>>>>>>>
public List<CQIE> getTMappings(List<CQIE> originalMappings, ClassifiedTBox reasoner, CQContainmentCheckUnderLIDs cqc, TMappingExclusionConfig excludeFromTMappings) {
<<<<<<<
Predicate currentPredicate = atomFactory.getClassPredicate(current.getIRI());
=======
Predicate currentPredicate = representative.getPredicate();
>>>>>>>
Predicate currentPredicate = atomFactory.getClassPredicate(representative.getIRI());
<<<<<<<
if (!full)
continue;
childPredicate = atomFactory.getClassPredicate(((OClass) childDescription).getIRI());
isClass = true;
isInverse = false;
/*
* USE OF excludeFromTMappings
*/
if(excludeFromTMappings.contains((OClass) childDescription)){
continue;
}
}
=======
childPredicate = ((OClass) childDescription).getPredicate();
arg = 0;
}
>>>>>>>
childPredicate = atomFactory.getClassPredicate(((OClass) childDescription).getIRI());
arg = 0;
}
<<<<<<<
childPredicate = atomFactory.getObjectPropertyPredicate(some.getIRI());
isClass = false;
isInverse = some.isInverse();
=======
childPredicate = some.getPredicate();
arg = some.isInverse() ? 1 : 0;
>>>>>>>
childPredicate = atomFactory.getObjectPropertyPredicate(some.getIRI());
arg = some.isInverse() ? 1 : 0;
<<<<<<<
childPredicate = atomFactory.getDataPropertyPredicate(some.getIRI());
isClass = false;
isInverse = false; // can never be an inverse
=======
childPredicate = some.getPredicate();
arg = 0; // can never be an inverse
>>>>>>>
childPredicate = atomFactory.getDataPropertyPredicate(some.getIRI());
arg = 0; // can never be an inverse
<<<<<<<
Function newMappingHead;
if (isClass) {
newMappingHead = termFactory.getFunction(currentPredicate, terms);
}
else {
if (!isInverse)
newMappingHead = termFactory.getFunction(currentPredicate, terms.get(0));
else
newMappingHead = termFactory.getFunction(currentPredicate, terms.get(1));
}
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping, datalogFactory, termFactory, eqNormalizer);
=======
Function newMappingHead = TERM_FACTORY.getFunction(currentPredicate, terms.get(arg));
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping);
>>>>>>>
Function newMappingHead = termFactory.getFunction(currentPredicate, terms.get(arg));
TMappingRule newmapping = new TMappingRule(newMappingHead, childmapping, datalogFactory,
termFactory, eqNormalizer);
<<<<<<<
if (!(equiv instanceof OClass) || equiv.equals(current))
continue;
Predicate p = atomFactory.getClassPredicate(((OClass) equiv).getIRI());
TMappingIndexEntry equivalentClassMappings = getMappings(mappingIndex, p);
for (TMappingRule currentNodeMapping : currentNodeMappings) {
Function newhead = termFactory.getFunction(p, currentNodeMapping.getHeadTerms());
TMappingRule newrule = new TMappingRule(newhead, currentNodeMapping, datalogFactory, termFactory, eqNormalizer);
equivalentClassMappings.mergeMappingsWithCQC(newrule);
}
=======
if (equiv instanceof OClass)
setMappings(mappingIndex, ((OClass) equiv).getPredicate(), currentNodeMappings);
>>>>>>>
if (equiv instanceof OClass)
setMappings(mappingIndex, atomFactory.getClassPredicate(((OClass) equiv).getIRI()), currentNodeMappings); |
<<<<<<<
import eu.optique.api.mapping.impl.rdf4j.RDF4JR2RMLMappingManagerFactory;
=======
import eu.optique.api.mapping.impl.sesame.SesameR2RMLMappingManagerFactory;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
>>>>>>>
import eu.optique.api.mapping.impl.rdf4j.RDF4JR2RMLMappingManagerFactory;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
<<<<<<<
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.OBDAModel;
import org.eclipse.rdf4j.model.Graph;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.impl.GraphImpl;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;
=======
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import org.openrdf.model.Graph;
import org.openrdf.model.Model;
import org.openrdf.model.Statement;
import org.openrdf.model.impl.GraphImpl;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;
>>>>>>>
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import org.eclipse.rdf4j.model.Graph;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.impl.GraphImpl;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;
<<<<<<<
R2RMLMappingManager mm = new RDF4JR2RMLMappingManagerFactory().getR2RMLMappingManager();
Collection<TriplesMap> coll = getTriplesMaps();
=======
R2RMLMappingManager mm = new SesameR2RMLMappingManagerFactory().getR2RMLMappingManager();
Collection<TriplesMap> coll = getTripleMaps();
>>>>>>>
R2RMLMappingManager mm = new RDF4JR2RMLMappingManagerFactory().getR2RMLMappingManager();
Collection<TriplesMap> coll = getTripleMaps(); |
<<<<<<<
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
import it.unibz.inf.ontop.generation.algebra.*;
import it.unibz.inf.ontop.generation.normalization.DialectExtraNormalizer;
import it.unibz.inf.ontop.generation.serializer.SelectFromWhereSerializer;
=======
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
import it.unibz.inf.ontop.dbschema.DBMetadataProvider;
import it.unibz.inf.ontop.dbschema.impl.JDBCMetadataProviderFactory;
>>>>>>>
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
import it.unibz.inf.ontop.generation.algebra.*;
import it.unibz.inf.ontop.generation.normalization.DialectExtraNormalizer;
import it.unibz.inf.ontop.generation.serializer.SelectFromWhereSerializer;
import it.unibz.inf.ontop.dbschema.DBMetadataProvider;
import it.unibz.inf.ontop.dbschema.impl.JDBCMetadataProviderFactory;
<<<<<<<
bindFromSettings(SelectFromWhereSerializer.class);
bindFromSettings(IQTree2SelectFromWhereConverter.class);
bindFromSettings(DialectExtraNormalizer.class);
bindFromSettings(IQTree2NativeNodeGenerator.class);
Module sqlAlgebraFactory = buildFactory(
ImmutableList.of(
SelectFromWhereWithModifiers.class,
SQLSerializedQuery.class,
SQLTable.class,
SQLInnerJoinExpression.class,
SQLLeftJoinExpression.class,
SQLNaryJoinExpression.class,
SQLUnionExpression.class,
SQLOneTupleDummyQueryExpression.class,
SQLOrderComparator.class
),
SQLAlgebraFactory.class);
install(sqlAlgebraFactory);
=======
Module mdProvider = buildFactory(ImmutableList.of(DBMetadataProvider.class), JDBCMetadataProviderFactory.class);
install(mdProvider);
>>>>>>>
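// Resolution keeps both sides: the SQL algebra / serializer bindings from the left
// branch and the JDBC metadata provider factory from the right.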
bindFromSettings(SelectFromWhereSerializer.class);
bindFromSettings(IQTree2SelectFromWhereConverter.class);
bindFromSettings(DialectExtraNormalizer.class);
bindFromSettings(IQTree2NativeNodeGenerator.class);
Module sqlAlgebraFactory = buildFactory(
ImmutableList.of(
SelectFromWhereWithModifiers.class,
SQLSerializedQuery.class,
SQLTable.class,
SQLInnerJoinExpression.class,
SQLLeftJoinExpression.class,
SQLNaryJoinExpression.class,
SQLUnionExpression.class,
SQLOneTupleDummyQueryExpression.class,
SQLOrderComparator.class
),
SQLAlgebraFactory.class);
install(sqlAlgebraFactory);
Module mdProvider = buildFactory(ImmutableList.of(DBMetadataProvider.class), JDBCMetadataProviderFactory.class);
install(mdProvider); |
<<<<<<<
@Override
protected ImmutableList<String> getExpectedValuesDuration1() {
return ImmutableList.of("0", "0", "0", "0", "0", "18.000", "20.000", "54.500");
}
@Override
protected ImmutableList<String> getExpectedValuesMultitypedAvg1() {
return ImmutableList.of("15.500000000000000000000000", "16.000000000000000000000000", "18.875000000000000000000000");
}
@Override
protected ImmutableList<String> getExpectedValuesMultitypedSum1(){
return ImmutableList.of("31.000", "32.000", "75.500");
}
=======
@Override
protected OntopOWLStatement createStatement() throws OWLException {
return CONNECTION.createStatement();
}
@AfterClass
public static void after() throws OWLException {
CONNECTION.close();
REASONER.dispose();
}
>>>>>>>
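// Resolution merges both branches: the statement/teardown plumbing from the right is
// kept alongside the expected-value overrides from the left.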
@Override
protected OntopOWLStatement createStatement() throws OWLException {
return CONNECTION.createStatement();
}
@AfterClass
public static void after() throws OWLException {
CONNECTION.close();
REASONER.dispose();
}
@Override
protected ImmutableList<String> getExpectedValuesDuration1() {
return ImmutableList.of("0", "0", "0", "0", "0", "18.000", "20.000", "54.500");
}
@Override
protected ImmutableList<String> getExpectedValuesMultitypedAvg1() {
return ImmutableList.of("15.500000000000000000000000", "16.000000000000000000000000", "18.875000000000000000000000");
}
@Override
protected ImmutableList<String> getExpectedValuesMultitypedSum1(){
return ImmutableList.of("31.000", "32.000", "75.500");
} |
<<<<<<<
=======
import java.util.*;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
>>>>>>>
<<<<<<<
pred = OBDADataFactoryImpl.getInstance().getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createIRI(pred.getName());
=======
pred = DATA_FACTORY.getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createURI(pred.getName());
>>>>>>>
pred = DATA_FACTORY.getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createIRI(pred.getName());
<<<<<<<
pred = OBDADataFactoryImpl.getInstance().getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createIRI(pred.getName());
=======
pred = DATA_FACTORY.getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createURI(pred.getName());
>>>>>>>
pred = DATA_FACTORY.getPredicate(((ValueConstant)(predf.getTerm(0))).getValue(), 1);
predUri = vf.createIRI(pred.getName()); |
<<<<<<<
import org.semanticweb.owlapi.io.ToStringRenderer;
=======
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
>>>>>>>
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("\"ημερομηνία_γέννησης\"@el", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(4, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(4, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("<http://www.imdb.com/title/Bästisar>", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(444090, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(444090, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("\"2006\"", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(443300, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(443300, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("\"$446,237 (Worldwide)\"^^xsd:string", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(112576, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(112576, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("\"389486\"^^xsd:integer", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(876722, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(876722, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("<1>", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(7530011, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(7530011, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("<http://www.imdb.com/name/1>", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(7530011, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(7530011, results);
<<<<<<<
String results = runTestQuery(queryBind);
assertEquals("\"113564\"^^xsd:int", results);
=======
int results = runTestQuery(p, queryBind);
assertEquals(705859, results);
>>>>>>>
int results = runTestQuery(queryBind);
assertEquals(705859, results); |
<<<<<<<
import it.unibz.inf.ontop.iq.transform.IQTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor;
=======
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
>>>>>>>
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
import it.unibz.inf.ontop.iq.visit.IQVisitor; |
<<<<<<<
/*
* Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2008.
*
* Licensed under the Aduna BSD-style license.
*/
package it.unibz.krdb.obda.quest.sparql;
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.ParserConfig;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.BasicParserSettings;
import org.openrdf.rio.turtle.TurtleParser;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QuestManifestTestUtils {
static final Logger logger = LoggerFactory.getLogger(QuestManifestTestUtils.class);
public static TestSuite suite(SPARQLQueryParent.Factory factory) throws Exception
{
final String manifestFile;
final File tmpDir;
URL url = QuestManifestTestUtils.class.getResource("/testcases-dawg-quest/data-r2/manifest-evaluation.ttl");
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("sparql-evaluation");
JarURLConnection con = (JarURLConnection)url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
}
catch (IOException e) {
throw new AssertionError(e);
}
}
else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
}
finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
}
catch (IOException e) {
System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, new URL(manifestFile), manifestFile);
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, "
+ " qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>";
TupleQueryResult manifestResults = con.prepareTupleQuery(QueryLanguage.SERQL, query, manifestFile).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile").toString();
suite.addTest(SPARQLQueryParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with " + suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI, Resource... contexts)
throws IOException, RepositoryException, RDFParseException
{
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE, vf);
ParserConfig config = rdfParser.getParserConfig();
// To emulate DatatypeHandling.IGNORE
config.addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES);
config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
config.addNonFatalError(BasicParserSettings.NORMALIZE_DATATYPE_VALUES);
// config.set(BasicParserSettings.PRESERVE_BNODE_IDS, true);
// rdfParser.setVerifyData(false);
// rdfParser.setStopAtFirstError(true);
// rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
con.begin();
try {
rdfParser.parse(in, baseURI);
}
catch (RDFHandlerException e) {
con.rollback();
// RDFInserter only throws wrapped RepositoryExceptions
throw (RepositoryException)e.getCause();
}
catch (RuntimeException e) {
con.rollback();
throw e;
}
finally {
con.commit();
}
}
finally {
in.close();
}
}
}
=======
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
package it.unibz.krdb.obda.quest.sparql;
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.turtle.TurtleParser;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QuestManifestTestUtils {
static final Logger logger = LoggerFactory.getLogger(QuestManifestTestUtils.class);
public static TestSuite suite(SPARQLQueryParent.Factory factory) throws Exception
{
final String manifestFile;
final File tmpDir;
URL url = QuestManifestTestUtils.class.getResource("/testcases-dawg-quest/data-r2/manifest-evaluation.ttl");
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("sparql-evaluation");
JarURLConnection con = (JarURLConnection)url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
}
catch (IOException e) {
throw new AssertionError(e);
}
}
else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
}
finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
}
catch (IOException e) {
System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, new URL(manifestFile), manifestFile);
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, "
+ " qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>";
TupleQueryResult manifestResults = con.prepareTupleQuery(QueryLanguage.SERQL, query, manifestFile).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile").toString();
suite.addTest(SPARQLQueryParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with " + suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI, Resource... contexts)
throws IOException, RepositoryException, RDFParseException
{
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = new TurtleParser();
rdfParser.setValueFactory(vf);
rdfParser.setVerifyData(false);
rdfParser.setStopAtFirstError(true);
rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
boolean autoCommit = con.isAutoCommit();
con.setAutoCommit(false);
try {
rdfParser.parse(in, baseURI);
}
catch (RDFHandlerException e) {
if (autoCommit) {
con.rollback();
}
// RDFInserter only throws wrapped RepositoryExceptions
throw (RepositoryException)e.getCause();
}
catch (RuntimeException e) {
if (autoCommit) {
con.rollback();
}
throw e;
}
finally {
con.setAutoCommit(autoCommit);
}
}
finally {
in.close();
}
}
}
>>>>>>>
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
package it.unibz.krdb.obda.quest.sparql;
import info.aduna.io.FileUtil;
import info.aduna.io.ZipUtil;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarFile;
import junit.framework.TestResult;
import junit.framework.TestSuite;
import org.openrdf.OpenRDFUtil;
import org.openrdf.model.Resource;
import org.openrdf.model.ValueFactory;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.TupleQueryResult;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.repository.util.RDFInserter;
import org.openrdf.rio.ParserConfig;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.RDFParser;
import org.openrdf.rio.Rio;
import org.openrdf.rio.helpers.BasicParserSettings;
import org.openrdf.sail.memory.MemoryStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QuestManifestTestUtils {
static final Logger logger = LoggerFactory.getLogger(QuestManifestTestUtils.class);
public static TestSuite suite(SPARQLQueryParent.Factory factory) throws Exception
{
final String manifestFile;
final File tmpDir;
URL url = QuestManifestTestUtils.class.getResource("/testcases-dawg-quest/data-r2/manifest-evaluation.ttl");
if ("jar".equals(url.getProtocol())) {
// Extract manifest files to a temporary directory
try {
tmpDir = FileUtil.createTempDir("sparql-evaluation");
JarURLConnection con = (JarURLConnection)url.openConnection();
JarFile jar = con.getJarFile();
ZipUtil.extract(jar, tmpDir);
File localFile = new File(tmpDir, con.getEntryName());
manifestFile = localFile.toURI().toURL().toString();
}
catch (IOException e) {
throw new AssertionError(e);
}
}
else {
manifestFile = url.toString();
tmpDir = null;
}
TestSuite suite = new TestSuite(factory.getClass().getName()) {
@Override
public void run(TestResult result) {
try {
super.run(result);
}
finally {
if (tmpDir != null) {
try {
FileUtil.deleteDir(tmpDir);
}
catch (IOException e) {
System.err.println("Unable to clean up temporary directory '" + tmpDir + "': " + e.getMessage());
}
}
}
}
};
Repository manifestRep = new SailRepository(new MemoryStore());
manifestRep.initialize();
RepositoryConnection con = manifestRep.getConnection();
addTurtle(con, new URL(manifestFile), manifestFile);
String query = "SELECT DISTINCT manifestFile FROM {x} rdf:first {manifestFile} "
+ "USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, "
+ " qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>";
TupleQueryResult manifestResults = con.prepareTupleQuery(QueryLanguage.SERQL, query, manifestFile).evaluate();
while (manifestResults.hasNext()) {
BindingSet bindingSet = manifestResults.next();
String subManifestFile = bindingSet.getValue("manifestFile").toString();
suite.addTest(SPARQLQueryParent.suite(subManifestFile, factory));
}
manifestResults.close();
con.close();
manifestRep.shutDown();
logger.info("Created aggregated test suite with " + suite.countTestCases() + " test cases.");
return suite;
}
static void addTurtle(RepositoryConnection con, URL url, String baseURI, Resource... contexts)
throws IOException, RepositoryException, RDFParseException
{
if (baseURI == null) {
baseURI = url.toExternalForm();
}
InputStream in = url.openStream();
try {
OpenRDFUtil.verifyContextNotNull(contexts);
final ValueFactory vf = con.getRepository().getValueFactory();
RDFParser rdfParser = Rio.createParser(RDFFormat.TURTLE, vf);
ParserConfig config = rdfParser.getParserConfig();
// To emulate DatatypeHandling.IGNORE
config.addNonFatalError(BasicParserSettings.FAIL_ON_UNKNOWN_DATATYPES);
config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES);
config.addNonFatalError(BasicParserSettings.NORMALIZE_DATATYPE_VALUES);
// config.set(BasicParserSettings.PRESERVE_BNODE_IDS, true);
// rdfParser.setVerifyData(false);
// rdfParser.setStopAtFirstError(true);
// rdfParser.setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE);
RDFInserter rdfInserter = new RDFInserter(con);
rdfInserter.enforceContext(contexts);
rdfParser.setRDFHandler(rdfInserter);
con.begin();
try {
rdfParser.parse(in, baseURI);
// commit belongs inside the try: committing in a finally block would run after a
// rollback in the catch blocks and mask the original error
con.commit();
}
catch (RDFHandlerException e) {
con.rollback();
// RDFInserter only throws wrapped RepositoryExceptions
throw (RepositoryException)e.getCause();
}
catch (RuntimeException e) {
con.rollback();
throw e;
}
}
finally {
in.close();
}
}
} |
<<<<<<<
=======
import static it.unibz.inf.ontop.model.OntopModelSingletons.SUBSTITUTION_FACTORY;
import static it.unibz.inf.ontop.model.OntopModelSingletons.TERM_FACTORY;
import static it.unibz.inf.ontop.model.term.functionsymbol.ExpressionOperation.EQ;
>>>>>>>
import static it.unibz.inf.ontop.model.term.functionsymbol.ExpressionOperation.EQ;
<<<<<<<
Optional<ImmutableExpression> newLJCondition = leftJoinNode.getOptionalFilterCondition()
.map(c1 -> analysis.getAdditionalExpression()
.map(c2 -> immutabilityTools.foldBooleanExpressions(c1, c2))
.orElseGet(() -> Optional.of(c1)))
.orElseGet(analysis::getAdditionalExpression);
=======
ImmutableSet<Variable> requiredVariablesAboveLJ = query.getVariablesRequiredByAncestors(leftJoinNode);
ImmutableSet<Variable> leftVariables = query.getVariables(leftChild);
/*
* All the conditions that could be assigned to the LJ put together
*/
Optional<ImmutableExpression> newLJCondition = ImmutabilityTools.foldBooleanExpressions(Stream.concat(
// Former condition
Stream.of(leftJoinNode.getOptionalFilterCondition(),
// New condition proposed by the analyser
analysis.getAdditionalExpression(),
// Former additional filter condition on the right
rightComponent.filterNode.map(FilterNode::getFilterCondition))
.filter(Optional::isPresent)
.map(Optional::get),
// Equalities extracted from the right substitution
rightComponent.constructionNode
.map(n -> extractEqualities(n.getSubstitution(), leftVariables))
.orElseGet(Stream::empty)));
Optional<ImmutableSubstitution<ImmutableTerm>> remainingRightSubstitution = rightComponent.constructionNode
.map(ConstructionNode::getSubstitution)
.filter(s -> !s.isEmpty())
.map(s -> SUBSTITUTION_FACTORY.getSubstitution(s.getImmutableMap().entrySet().stream()
.filter(e -> !leftVariables.contains(e.getKey()))
.collect(ImmutableCollectors.toMap())))
.filter(s -> !s.isEmpty());
>>>>>>>
ImmutableSet<Variable> requiredVariablesAboveLJ = query.getVariablesRequiredByAncestors(leftJoinNode);
ImmutableSet<Variable> leftVariables = query.getVariables(leftChild);
/*
* All the conditions that could be assigned to the LJ put together
*/
Optional<ImmutableExpression> newLJCondition = immutabilityTools.foldBooleanExpressions(Stream.concat(
// Former condition
Stream.of(leftJoinNode.getOptionalFilterCondition(),
// New condition proposed by the analyser
analysis.getAdditionalExpression(),
// Former additional filter condition on the right
rightComponent.filterNode.map(FilterNode::getFilterCondition))
.filter(Optional::isPresent)
.map(Optional::get),
// Equalities extracted from the right substitution
rightComponent.constructionNode
.map(n -> extractEqualities(n.getSubstitution(), leftVariables))
.orElseGet(Stream::empty)));
Optional<ImmutableSubstitution<ImmutableTerm>> remainingRightSubstitution = rightComponent.constructionNode
.map(ConstructionNode::getSubstitution)
.filter(s -> !s.isEmpty())
.map(s -> substitutionFactory.getSubstitution(s.getImmutableMap().entrySet().stream()
.filter(e -> !leftVariables.contains(e.getKey()))
.collect(ImmutableCollectors.toMap())))
.filter(s -> !s.isEmpty());
<<<<<<<
ImmutableSubstitution<ImmutableFunctionalTerm> substitutionToPropagate = substitutionFactory.getSubstitution(
=======
ImmutableSubstitution<ImmutableTerm> conditionalVarSubstitution = SUBSTITUTION_FACTORY.getSubstitution(
>>>>>>>
ImmutableSubstitution<ImmutableTerm> conditionalVarSubstitution = substitutionFactory.getSubstitution( |
<<<<<<<
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
=======
import com.google.common.collect.*;
>>>>>>>
import com.google.common.collect.*;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject; |
<<<<<<<
import com.google.inject.Inject;
=======
import com.google.common.collect.Lists;
>>>>>>>
import com.google.inject.Inject;
import com.google.common.collect.Lists;
<<<<<<<
import it.unibz.inf.ontop.injection.OntopMappingSQLSettings;
import it.unibz.inf.ontop.model.atom.AtomFactory;
=======
>>>>>>>
import it.unibz.inf.ontop.injection.OntopMappingSQLSettings;
import it.unibz.inf.ontop.model.atom.AtomFactory;
<<<<<<<
private List<SQLPPTriplesMap> instantiateMapping(Connection connection, DBMetadata metadata, String id,
ImmutableFunctionalTerm target, String sql)
throws SQLException, JSQLParserException, InvalidSelectQueryException, UnsupportedSelectQueryException {
ImmutableList<SelectExpressionItem> queryColumns = getQueryColumns(metadata, sql);
int arity = isURIRDFType(target.getTerm(1)) ? 1 : 2;
Function templateAtom = (Function)((arity == 1)
? target.getTerm(2) // template is in the position of object
: target.getTerm(1)); // template is in the position of predicate
ImmutableList<SelectExpressionItem> templateColumns =
getTemplateColumns(metadata.getQuotedIDFactory(), templateAtom, queryColumns);
ImmutableList<SelectItem> newColumns = queryColumns.stream()
.filter(c -> !templateColumns.contains(c))
.collect(ImmutableCollectors.toList());
if (newColumns.isEmpty()) // avoid empty SELECT clause
newColumns = ImmutableList.of(new AllColumns());
List<List<String>> templateValues = getTemplateValues(connection, sql, templateColumns);
List<SQLPPTriplesMap> expandedMappings = new ArrayList<>(templateValues.size());
=======
public boolean hasMappingsToBeExpanded() { return !mappingsToBeExpanded.isEmpty(); }
>>>>>>>
public boolean hasMappingsToBeExpanded() { return !mappingsToBeExpanded.isEmpty(); }
<<<<<<<
String predicateName = getPredicateName(templateAtom.getTerm(0), values);
ImmutableFunctionalTerm newTarget = (arity == 1)
? termFactory.getImmutableFunctionalTerm(atomFactory.getClassPredicate(predicateName),
target.getTerm(0))
: termFactory.getImmutableFunctionalTerm(atomFactory.getObjectPropertyPredicate(predicateName),
target.getTerm(0), target.getTerm(2));
=======
for (Expansion m : mappingsToBeExpanded) {
try {
boolean isClass = isURIRDFType(m.target.getTerm(1));
// if isClass, then the template is the object;
// otherwise, it's a property and the template is the predicate
Function templateAtom = (Function)m.target.getTerm(isClass ? 2 : 1);
>>>>>>>
for (Expansion m : mappingsToBeExpanded) {
try {
boolean isClass = isURIRDFType(m.target.getTerm(1));
// if isClass, then the template is the object;
// otherwise, it's a property and the template is the predicate
Function templateAtom = (Function)m.target.getTerm(isClass ? 2 : 1);
<<<<<<<
private ImmutableList<SelectExpressionItem> getQueryColumns(DBMetadata metadata, String sql)
=======
private static ImmutableMap<QuotedID, SelectExpressionItem> getQueryColumns(DBMetadata metadata, String sql)
>>>>>>>
private ImmutableMap<QuotedID, SelectExpressionItem> getQueryColumns(DBMetadata metadata, String sql)
<<<<<<<
SelectQueryAttributeExtractor2 sqae = new SelectQueryAttributeExtractor2(metadata, termFactory);
=======
SelectQueryAttributeExtractor2 sqae = new SelectQueryAttributeExtractor2(metadata);
>>>>>>>
SelectQueryAttributeExtractor2 sqae = new SelectQueryAttributeExtractor2(metadata, termFactory); |
<<<<<<<
this.rewriter = questinstance.getRewriter();
this.querygenerator = questinstance.cloneIfNecessaryNativeQueryGenerator();
=======
// this.unfoldingmechanism = questinstance.unfolder;
this.querygenerator = questinstance.cloneDataSourceQueryGenerator();
>>>>>>>
this.querygenerator = questinstance.cloneIfNecessaryNativeQueryGenerator(); |
<<<<<<<
import it.unibz.inf.ontop.model.type.LanguageTag;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TermType;
import it.unibz.inf.ontop.spec.mapping.SQLMappingFactory;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPTriplesMap;
import it.unibz.inf.ontop.spec.mapping.pp.impl.OntopNativeSQLPPTriplesMap;
=======
import it.unibz.inf.ontop.model.term.*;
>>>>>>>
import it.unibz.inf.ontop.model.type.LanguageTag;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TermType;
import it.unibz.inf.ontop.spec.mapping.SQLMappingFactory;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPTriplesMap;
import it.unibz.inf.ontop.spec.mapping.pp.impl.OntopNativeSQLPPTriplesMap;
<<<<<<<
import it.unibz.inf.ontop.model.type.COL_TYPE;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate.COL_TYPE;
import it.unibz.inf.ontop.spec.mapping.SQLMappingFactory;
>>>>>>>
import it.unibz.inf.ontop.model.type.COL_TYPE;
<<<<<<<
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.spec.ontology.Assertion;
import it.unibz.inf.ontop.spec.ontology.ClassExpression;
import it.unibz.inf.ontop.spec.ontology.DataPropertyAssertion;
import it.unibz.inf.ontop.spec.ontology.DataPropertyExpression;
import it.unibz.inf.ontop.spec.ontology.ImmutableOntologyVocabulary;
import it.unibz.inf.ontop.spec.ontology.ObjectPropertyAssertion;
import it.unibz.inf.ontop.spec.ontology.ObjectPropertyExpression;
import it.unibz.inf.ontop.spec.ontology.ClassAssertion;
import it.unibz.inf.ontop.spec.ontology.OClass;
import it.unibz.inf.ontop.spec.ontology.Equivalences;
import it.unibz.inf.ontop.spec.ontology.EquivalencesDAG;
import it.unibz.inf.ontop.spec.ontology.TBoxReasoner;
=======
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPTriplesMap;
import it.unibz.inf.ontop.spec.mapping.pp.impl.OntopNativeSQLPPTriplesMap;
import it.unibz.inf.ontop.spec.ontology.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
>>>>>>>
import it.unibz.inf.ontop.model.term.*;
import it.unibz.inf.ontop.spec.ontology.Assertion;
import it.unibz.inf.ontop.spec.ontology.ClassExpression;
import it.unibz.inf.ontop.spec.ontology.DataPropertyAssertion;
import it.unibz.inf.ontop.spec.ontology.DataPropertyExpression;
import it.unibz.inf.ontop.spec.ontology.ImmutableOntologyVocabulary;
import it.unibz.inf.ontop.spec.ontology.ObjectPropertyAssertion;
import it.unibz.inf.ontop.spec.ontology.ObjectPropertyExpression;
import it.unibz.inf.ontop.spec.ontology.ClassAssertion;
import it.unibz.inf.ontop.spec.ontology.OClass;
import it.unibz.inf.ontop.spec.ontology.Equivalences;
import it.unibz.inf.ontop.spec.ontology.EquivalencesDAG;
import it.unibz.inf.ontop.spec.ontology.TBoxReasoner;
<<<<<<<
case LITERAL_LANG:
LanguageTag languageTag = ((RDFDatatype)type2).getLanguageTag().get();
objectTerm = TERM_FACTORY.getImmutableTypedTerm(Y, TERM_FACTORY.getConstantLiteral(
languageTag.getFullString()));
=======
case LANG_STRING:
objectTerm = TERM_FACTORY.getImmutableTypedTerm(Y, TERM_FACTORY.getVariable("Z"));
>>>>>>>
case LANG_STRING:
LanguageTag languageTag = ((RDFDatatype)type2).getLanguageTag().get();
objectTerm = TERM_FACTORY.getImmutableTypedTerm(Y, TERM_FACTORY.getConstantLiteral(
languageTag.getFullString())); |
<<<<<<<
import com.google.common.collect.ImmutableMap;
=======
>>>>>>>
import com.google.common.collect.ImmutableMap;
<<<<<<<
import it.unibz.inf.ontop.mapping.MappingMetadata;
import it.unibz.inf.ontop.model.*;
=======
>>>>>>>
import it.unibz.inf.ontop.mapping.MappingMetadata;
import it.unibz.inf.ontop.model.*;
<<<<<<<
=======
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.model.impl.MappingFactoryImpl;
import it.unibz.inf.ontop.model.impl.RDBMSourceParameterConstants;
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.ontology.Ontology;
import it.unibz.inf.ontop.owlrefplatform.core.basicoperations.VocabularyValidator;
import it.unibz.inf.ontop.owlrefplatform.core.mappingprocessing.TMappingExclusionConfig;
=======
>>>>>>>
import it.unibz.inf.ontop.ontology.Ontology;
import it.unibz.inf.ontop.owlrefplatform.core.basicoperations.VocabularyValidator;
import it.unibz.inf.ontop.owlrefplatform.core.mappingprocessing.TMappingExclusionConfig; |
<<<<<<<
import java.util.Collections;
=======
import java.sql.SQLException;
>>>>>>>
<<<<<<<
=======
import java.util.List;
>>>>>>>
<<<<<<<
private final IQuestStatement st;
private final NativeQueryLanguageComponentFactory nativeQLFactory;
private Logger log = LoggerFactory.getLogger(QuestDBStatement.class);
=======
private final QuestStatement st;
private final Logger log = LoggerFactory.getLogger(QuestDBStatement.class);
>>>>>>>
private final IQuestStatement st;
private final NativeQueryLanguageComponentFactory nativeQLFactory;
private final Logger log = LoggerFactory.getLogger(QuestDBStatement.class);
<<<<<<<
result = st.insertData(aBoxNormalIter, useFile, commit, batch);
=======
result = st.insertData(aBoxNormalIter, /*useFile,*/ commit, batch);
>>>>>>>
result = st.insertData(aBoxNormalIter, /*useFile,*/ commit, batch);
<<<<<<<
MappingParser parser = nativeQLFactory.create(new File(uri));
OBDAModel obdaModel = parser.getOBDAModel();
materializer = new QuestMaterializer(obdaModel);
=======
OBDAModel obdaModel = OBDADataFactoryImpl.getInstance().getOBDAModel();
ModelIOManager io = new ModelIOManager(obdaModel);
io.load(uri.toString());
materializer = new QuestMaterializer(obdaModel, false);
>>>>>>>
MappingParser parser = nativeQLFactory.create(new File(uri));
OBDAModel obdaModel = parser.getOBDAModel();
materializer = new QuestMaterializer(obdaModel, false);
<<<<<<<
/**
* Ontop is not SQL-specific anymore.
*
* Use getTargetQuery instead.
*/
@Deprecated
public String getSQL(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query).getNativeQueryString();
}
public TargetQuery getTargetQuery(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query);
=======
/*
* QuestSpecific
*/
public String getSQL(String query) throws Exception {
return st.getUnfolding(query);
>>>>>>>
/**
* Ontop is not SQL-specific anymore.
*
* Use getTargetQuery instead.
*/
@Deprecated
public String getSQL(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query).getNativeQueryString();
}
public TargetQuery getTargetQuery(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query); |
<<<<<<<
private final AtomFactory atomFactory;
private final TermFactory termFactory;
private final DatalogFactory datalogFactory;
=======
private final JdbcTypeMapper jdbcTypeMapper;
>>>>>>>
<<<<<<<
AtomFactory atomFactory,
TermFactory termFactory, DatalogFactory datalogFactory,
TypeFactory typeFactory) {
=======
JdbcTypeMapper jdbcTypeMapper, TypeFactory typeFactory) {
>>>>>>>
TypeFactory typeFactory) {
<<<<<<<
this.atomFactory = atomFactory;
this.termFactory = termFactory;
this.datalogFactory = datalogFactory;
=======
this.jdbcTypeMapper = jdbcTypeMapper;
>>>>>>>
<<<<<<<
RDBMetadata metadata = RDBMetadataExtractionTools.createMetadata(connection, termFactory, typeFactory,
datalogFactory, atomFactory);
=======
RDBMetadata metadata = RDBMetadataExtractionTools.createMetadata(connection, typeFactory, jdbcTypeMapper);
>>>>>>>
RDBMetadata metadata = RDBMetadataExtractionTools.createMetadata(connection, typeFactory); |
<<<<<<<
import org.semanticweb.ontop.protege4.core.OBDAModelWrapper;
=======
import org.semanticweb.ontop.ontology.DataPropertyExpression;
import org.semanticweb.ontop.ontology.ObjectPropertyExpression;
>>>>>>>
import org.semanticweb.ontop.protege4.core.OBDAModelWrapper;
import org.semanticweb.ontop.ontology.DataPropertyExpression;
import org.semanticweb.ontop.ontology.ObjectPropertyExpression; |
<<<<<<<
switch (literal.getType()) {
case OBJECT:
case LITERAL:
case STRING:
// creates xsd:string
return fact.createLiteral(literal.getValue());
case LITERAL_LANG:
// creates xsd:langString
return fact.createLiteral(literal.getValue(), literal.getLanguage());
default:
IRI datatype = dtfac.getDatatypeURI(literal.getType());
if (datatype == null)
throw new RuntimeException(
"Found unknown TYPE for constant: " + literal + " with COL_TYPE=" + literal.getType());
return fact.createLiteral(literal.getValue(), datatype);
}
=======
if ((literal.getType() == COL_TYPE.LITERAL) || (literal.getType() == COL_TYPE.LITERAL_LANG)) {
return fact.createLiteral(literal.getValue(), literal.getLanguage());
}
else if (literal.getType() == COL_TYPE.OBJECT) {
return fact.createLiteral(literal.getValue(), DATATYPE_FACTORY.getDatatypeURI(COL_TYPE.STRING));
}
else {
IRI datatype = DATATYPE_FACTORY.getDatatypeURI(literal.getType());
if (datatype == null)
throw new RuntimeException("Found unknown TYPE for constant: " + literal + " with COL_TYPE="+ literal.getType());
return fact.createLiteral(literal.getValue(), datatype);
}
>>>>>>>
switch (literal.getType()) {
case OBJECT:
case LITERAL:
case STRING:
// creates xsd:string
return fact.createLiteral(literal.getValue());
case LITERAL_LANG:
// creates xsd:langString
return fact.createLiteral(literal.getValue(), literal.getLanguage());
default:
IRI datatype = DATATYPE_FACTORY.getDatatypeURI(literal.getType());
if (datatype == null)
throw new RuntimeException(
"Found unknown TYPE for constant: " + literal + " with COL_TYPE=" + literal.getType());
return fact.createLiteral(literal.getValue(), datatype);
} |
<<<<<<<
/**
* Replaces the sub-tree (root included) by a single node
*/
default void replaceSubTree(QueryNode subTreeRootNode, QueryNode replacingNode) {
insertParent(subTreeRootNode, replacingNode);
removeSubTree(subTreeRootNode);
}
/**
* If no position is given, replaces the parent node by its first child
*/
void replaceNodeByChild(QueryNode parentNode,
Optional<NonCommutativeOperatorNode.ArgumentPosition> optionalReplacingChildPosition);
=======
/**
* Keeps the same query node objects but clones the tree edges
* (since the latter are mutable by default).
*/
QueryTreeComponent createSnapshot();
>>>>>>>
/**
* If no position is given, replaces the parent node by its first child
*/
void replaceNodeByChild(QueryNode parentNode,
Optional<NonCommutativeOperatorNode.ArgumentPosition> optionalReplacingChildPosition);
/**
* Keeps the same query node objects but clones the tree edges
* (since the latter are mutable by default).
*/
QueryTreeComponent createSnapshot(); |
<<<<<<<
return iqConverter.convert(iq.normalizeForOptimization(), query.getDBMetadata(), query.getExecutorRegistry());
=======
return iqConverter.convert(iq.liftBinding(), query.getExecutorRegistry());
>>>>>>>
return iqConverter.convert(iq.normalizeForOptimization(), query.getExecutorRegistry()); |
<<<<<<<
import it.unibz.inf.ontop.model.type.COL_TYPE;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate;
import org.apache.commons.lang3.time.DateUtils;
>>>>>>>
import it.unibz.inf.ontop.model.type.COL_TYPE;
import org.apache.commons.lang3.time.DateUtils;
<<<<<<<
return TERM_FACTORY.getConstantLiteral(
convertDatetimeString(value),
COL_TYPE.DATETIME
=======
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE_TIME.format(convertToJavaDate(value)),Predicate.COL_TYPE.DATETIME
>>>>>>>
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE_TIME.format(convertToJavaDate(value)), COL_TYPE.DATETIME
<<<<<<<
stringValue = system.equals(ORACLE)?
convertDatetimeString(stringValue):
stringValue;
return TERM_FACTORY.getConstantLiteral(
stringValue.replaceFirst(" ", "T").replaceAll(" ", ""),
COL_TYPE.DATETIME_STAMP
=======
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE_TIME.format(convertToJavaDate(value)),Predicate.COL_TYPE.DATETIME_STAMP
>>>>>>>
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE_TIME.format(convertToJavaDate(value)), COL_TYPE.DATETIME_STAMP
<<<<<<<
if(system.equals(ORACLE)) {
try {
DateFormat df = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH);
java.util.Date date = df.parse(stringValue);
stringValue = date.toString();
} catch (ParseException e) {
throw new OntopResultConversionException(e);
}
}
return TERM_FACTORY.getConstantLiteral(stringValue, COL_TYPE.DATE);
=======
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE.format(convertToJavaDate(value)),Predicate.COL_TYPE.DATE);
>>>>>>>
return TERM_FACTORY.getConstantLiteral( DateTimeFormatter.ISO_DATE.format(convertToJavaDate(value)), COL_TYPE.DATE);
<<<<<<<
return TERM_FACTORY.getConstantLiteral(stringValue.replace(' ', 'T'), COL_TYPE.TIME);
=======
return TERM_FACTORY.getConstantLiteral(DateTimeFormatter.ISO_TIME.format(convertToTime(value)), Predicate.COL_TYPE.TIME);
>>>>>>>
return TERM_FACTORY.getConstantLiteral(DateTimeFormatter.ISO_TIME.format(convertToTime(value)), COL_TYPE.TIME); |
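// A minimal standalone sketch of the ISO normalization chosen in the resolutions above:
// an Oracle-style "dd-MMM-yy" date string (hypothetical input; convertToJavaDate is assumed
// to do something equivalent) is parsed with java.time and re-serialized via ISO_DATE.
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

public class IsoDateSketch {
    public static void main(String[] args) {
        DateTimeFormatter oracleStyle = DateTimeFormatter.ofPattern("dd-MMM-yy", Locale.ENGLISH);
        LocalDate date = LocalDate.parse("15-Mar-21", oracleStyle);
        // ISO_DATE renders a LocalDate as yyyy-MM-dd, the lexical form used for xsd:date
        System.out.println(DateTimeFormatter.ISO_DATE.format(date)); // 2021-03-15
    }
}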
<<<<<<<
=======
import static it.unibz.inf.ontop.model.OntopModelSingletons.*;
import static it.unibz.inf.ontop.model.term.TermConstants.FALSE;
import static it.unibz.inf.ontop.model.term.TermConstants.TRUE;
>>>>>>> |
<<<<<<<
=======
import it.unibz.inf.ontop.model.term.Constant;
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.model.term.Constant;
import it.unibz.inf.ontop.model.type.COL_TYPE;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate;
>>>>>>>
import it.unibz.inf.ontop.model.term.Constant;
import it.unibz.inf.ontop.model.type.COL_TYPE;
<<<<<<<
COL_TYPE type = OWLTypeMapper.getType(object.getDatatype());
c2 = TERM_FACTORY.getConstantLiteral(object.getLiteral(), type);
=======
Optional<Predicate.COL_TYPE> type = TYPE_FACTORY.getDatatype(object.getDatatype().toStringID());
if(type.isPresent()){
c2 = TERM_FACTORY.getConstantLiteral(object.getLiteral(), type.get());
}
else {
c2 = TERM_FACTORY.getConstantLiteral(object.getLiteral());
}
>>>>>>>
Optional<COL_TYPE> type = TYPE_FACTORY.getDatatype(object.getDatatype().toStringID());
if(type.isPresent()){
c2 = TERM_FACTORY.getConstantLiteral(object.getLiteral(), type.get());
}
else {
c2 = TERM_FACTORY.getConstantLiteral(object.getLiteral());
}
<<<<<<<
COL_TYPE type = OWLTypeMapper.getType(owlLiteral.getDatatype());
return TERM_FACTORY.getConstantLiteral(owlLiteral.getLiteral(), type);
=======
Optional<Predicate.COL_TYPE> type = TYPE_FACTORY.getDatatype(owlLiteral.getDatatype().toStringID());
if(!type.isPresent()){
return TERM_FACTORY.getConstantLiteral(owlLiteral.getLiteral());
}
return TERM_FACTORY.getConstantLiteral(owlLiteral.getLiteral(), type.get());
>>>>>>>
Optional<COL_TYPE> type = TYPE_FACTORY.getDatatype(owlLiteral.getDatatype().toStringID());
if(!type.isPresent()){
return TERM_FACTORY.getConstantLiteral(owlLiteral.getLiteral());
}
return TERM_FACTORY.getConstantLiteral(owlLiteral.getLiteral(), type.get()); |
<<<<<<<
// TODO: distinguish the strict and non-strict equalities
default Expression getFunctionEQ(Term firstTerm, Term secondTerm) {
return getFunctionStrictEQ(firstTerm, secondTerm);
}
public Expression getFunctionStrictEQ(Term firstTerm, Term secondTerm);
/**
* To be used when parsing the mapping and when an equality is found.
 * Is expected to be replaced later by a proper equality (may be strict or not)
*/
ImmutableExpression getNotYetTypedEquality(ImmutableTerm t1, ImmutableTerm t2);
=======
>>>>>>>
/**
* To be used when parsing the mapping and when an equality is found.
 * Is expected to be replaced later by a proper equality (may be strict or not)
*/
ImmutableExpression getNotYetTypedEquality(ImmutableTerm t1, ImmutableTerm t2); |
<<<<<<<
public IQTree removeDistincts(IQTree child, IQProperties iqProperties) {
IQTree newChild = child.removeDistincts();
IQProperties newProperties = newChild.equals(child)
? iqProperties.declareDistinctRemovalWithoutEffect()
: iqProperties.declareDistinctRemovalWithEffect();
return iqFactory.createUnaryIQTree(this, newChild, newProperties);
}
@Override
=======
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(IQTree child) {
return child.getPossibleVariableDefinitions();
}
@Override
>>>>>>>
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions(IQTree child) {
return child.getPossibleVariableDefinitions();
}
@Override
public IQTree removeDistincts(IQTree child, IQProperties iqProperties) {
IQTree newChild = child.removeDistincts();
IQProperties newProperties = newChild.equals(child)
? iqProperties.declareDistinctRemovalWithoutEffect()
: iqProperties.declareDistinctRemovalWithEffect();
return iqFactory.createUnaryIQTree(this, newChild, newProperties);
}
@Override |
<<<<<<<
this.unificationTools = unificationTools;
this.constructionNodeTools = constructionNodeTools;
this.substitutionTools = substitutionTools;
this.substitutionFactory = substitutionFactory;
this.termFactory = termFactory;
this.nullValue = termFactory.getNullConstant();
=======
this.childVariables = extractChildVariables(projectedVariables, substitution);
>>>>>>>
this.unificationTools = unificationTools;
this.constructionNodeTools = constructionNodeTools;
this.substitutionTools = substitutionTools;
this.substitutionFactory = substitutionFactory;
this.termFactory = termFactory;
this.nullValue = termFactory.getNullConstant();
this.childVariables = extractChildVariables(projectedVariables, substitution);
<<<<<<<
this.constructionNodeTools = constructionNodeTools;
this.substitutionFactory = substitutionFactory;
this.nullValue = termFactory.getNullConstant();
=======
this.childVariables = extractChildVariables(projectedVariables, substitution);
>>>>>>>
this.constructionNodeTools = constructionNodeTools;
this.substitutionFactory = substitutionFactory;
this.nullValue = termFactory.getNullConstant();
this.childVariables = extractChildVariables(projectedVariables, substitution);
<<<<<<<
this.nullValue = termFactory.getNullConstant();
=======
this.childVariables = extractChildVariables(projectedVariables, substitution);
>>>>>>>
this.nullValue = termFactory.getNullConstant();
this.childVariables = extractChildVariables(projectedVariables, substitution); |
<<<<<<<
=======
private final DBParameters dbParameters;
>>>>>>>
<<<<<<<
=======
this.dbParameters = dbParameters;
>>>>>>> |
<<<<<<<
=======
/***
* This method introduces new variable names in each data atom and
* equalities to account for JOIN operations. This method is called before
 * generating SQL queries and allows avoiding cross references in nested
* JOINs, which generate wrong ON or WHERE conditions.
*
*
* @param query
*/
public static void pullOutEqualities(CQIE query) {
Substitution substitutions = new SubstitutionImpl();
int[] newVarCounter = { 1 };
Set<Function> booleanAtoms = new HashSet<>();
List<Function> equalities = new LinkedList<>();
pullOutEqualities(query.getBody(), substitutions, equalities, newVarCounter, false);
List<Function> body = query.getBody();
body.addAll(equalities);
/*
* All new variables have been generated, the substitutions also, we
* need to apply them to the equality atoms and to the head of the
* query.
*/
SubstitutionUtilities.applySubstitution(query, substitutions, false);
}
>>>>>>>
/***
* This method introduces new variable names in each data atom and
* equalities to account for JOIN operations. This method is called before
 * generating SQL queries and allows avoiding cross references in nested
* JOINs, which generate wrong ON or WHERE conditions.
*
*
* @param query
*/
public static void pullOutEqualities(CQIE query) {
Substitution substitutions = new SubstitutionImpl();
int[] newVarCounter = { 1 };
Set<Function> booleanAtoms = new HashSet<>();
List<Function> equalities = new LinkedList<>();
pullOutEqualities(query.getBody(), substitutions, equalities, newVarCounter, false);
List<Function> body = query.getBody();
body.addAll(equalities);
/*
* All new variables have been generated, the substitutions also, we
* need to apply them to the equality atoms and to the head of the
* query.
*/
SubstitutionUtilities.applySubstitution(query, substitutions, false);
}
<<<<<<<
/***
* Takes an AND atom and breaks it into a list of individual condition
* atoms.
*
* @param atom
* @return
*/
public static List<Function> getUnfolderAtomList(Function atom) {
if (atom.getFunctionSymbol() != ExpressionOperation.AND) {
throw new InvalidParameterException();
}
List<Term> innerFunctionalTerms = new LinkedList<>();
for (Term term : atom.getTerms()) {
innerFunctionalTerms.addAll(getUnfolderTermList((Function) term));
}
List<Function> newatoms = new LinkedList<Function>();
for (Term innerterm : innerFunctionalTerms) {
Function f = (Function) innerterm;
Function newatom = fac.getFunction(f.getFunctionSymbol(), f.getTerms());
newatoms.add(newatom);
}
return newatoms;
}
/***
* Takes an AND atom and breaks it into a list of individual condition
* atoms.
*
* @param term
* @return
*/
public static List<Term> getUnfolderTermList(Function term) {
List<Term> result = new LinkedList<>();
if (term.getFunctionSymbol() != ExpressionOperation.AND) {
result.add(term);
} else {
List<Term> terms = term.getTerms();
for (Term currentterm : terms) {
if (currentterm instanceof Function) {
result.addAll(getUnfolderTermList((Function) currentterm));
} else {
result.add(currentterm);
}
}
}
return result;
}
=======
>>>>>>> |
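// A standalone sketch of the flattening performed by getUnfolderTermList above:
// nested AND terms are recursively broken into a flat list of conjuncts.
// Expr/Atom/And are hypothetical stand-ins for Ontop's Function/Term hierarchy.
import java.util.ArrayList;
import java.util.List;

public class FlattenAndSketch {
    interface Expr {}
    record Atom(String name) implements Expr {}
    record And(Expr left, Expr right) implements Expr {}

    // AND(AND(a, b), c) -> [a, b, c]
    static List<Expr> flatten(Expr e) {
        if (!(e instanceof And and))
            return List.of(e);
        List<Expr> result = new ArrayList<>(flatten(and.left()));
        result.addAll(flatten(and.right()));
        return result;
    }

    public static void main(String[] args) {
        Expr conjunction = new And(new And(new Atom("a"), new Atom("b")), new Atom("c"));
        System.out.println(flatten(conjunction)); // [Atom[name=a], Atom[name=b], Atom[name=c]]
    }
}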
<<<<<<<
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBConcatFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.IRIStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.RDFTermType;
import it.unibz.inf.ontop.model.type.TermTypeInference;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBConcatFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBTypeConversionFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.IRIStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TermTypeInference;
>>>>>>>
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBConcatFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.DBTypeConversionFunctionSymbol;
import it.unibz.inf.ontop.model.term.functionsymbol.db.IRIStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TermTypeInference;
<<<<<<<
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
=======
import java.util.Optional;
>>>>>>>
import java.util.Optional;
<<<<<<<
private static String appendTerms(ImmutableTerm term) {
if (term instanceof Constant) {
String st = ((Constant) term).getValue();
if (st.contains("{")) {
st = st.replace("{", "\\{");
st = st.replace("}", "\\}");
}
return st;
} else {
return "{" + ((Variable) term).getName() + "}";
}
}
=======
>>>>>>>
<<<<<<<
return displayURIConstant((Constant)term, prefixManager);
if (term instanceof RDFLiteralConstant)
return displayValueConstant((Constant)term);
=======
return displayIRIConstant((IRIConstant)term, prefixManager);
if (term instanceof RDFLiteralConstant)
return displayValueConstant((RDFLiteralConstant)term);
>>>>>>>
return displayIRIConstant((IRIConstant)term, prefixManager);
if (term instanceof RDFLiteralConstant)
return displayValueConstant((RDFLiteralConstant)term);
<<<<<<<
private static String displayURIConstant(Term term, PrefixManager prefixManager) {
return getAbbreviatedName(term.toString(), prefixManager, false); // shorten the URI if possible
=======
private static String displayIRIConstant(IRIConstant iri, PrefixManager prefixManager) {
if (iri.getIRI().getIRIString().equals(RDF.TYPE.getIRIString())) {
return "a";
} else {
return getAbbreviatedName(iri.toString(), prefixManager, false); // shorten the URI if possible
}
>>>>>>>
private static String displayIRIConstant(IRIConstant iri, PrefixManager prefixManager) {
if (iri.getIRI().getIRIString().equals(RDF.TYPE.getIRIString())) {
return "a";
} else {
return getAbbreviatedName(iri.toString(), prefixManager, false); // shorten the URI if possible
}
<<<<<<<
FunctionSymbol functionSymbol = function.getFunctionSymbol();
if (functionSymbol instanceof RDFTermFunctionSymbol) {
ImmutableTerm lexicalTerm = function.getTerm(0);
Optional<RDFDatatype> optionalDatatype = function.inferType()
.flatMap(TermTypeInference::getTermType)
.filter(t -> t instanceof RDFDatatype)
.map(t -> (RDFDatatype) t);
if (optionalDatatype.isPresent()) {
return displayDatatypeFunction(lexicalTerm, optionalDatatype.get(), prefixManager);
}
if (lexicalTerm instanceof ImmutableFunctionalTerm) {
ImmutableFunctionalTerm lexicalFunctionalTerm = (ImmutableFunctionalTerm) lexicalTerm;
FunctionSymbol lexicalFunctionSymbol = lexicalFunctionalTerm.getFunctionSymbol();
if (lexicalFunctionSymbol instanceof IRIStringTemplateFunctionSymbol)
return displayURITemplate(lexicalFunctionalTerm, prefixManager);
if (lexicalFunctionSymbol instanceof BnodeStringTemplateFunctionSymbol)
return displayFunctionalBnode(lexicalFunctionalTerm);
}
}
if (functionSymbol instanceof DBConcatFunctionSymbol)
=======
FunctionSymbol functionSymbol = function.getFunctionSymbol();
if (functionSymbol instanceof RDFTermFunctionSymbol) {
ImmutableTerm lexicalTerm = function.getTerm(0);
Optional<RDFDatatype> optionalDatatype = function.inferType()
.flatMap(TermTypeInference::getTermType)
.filter(t -> t instanceof RDFDatatype)
.map(t -> (RDFDatatype) t);
if (optionalDatatype.isPresent()) {
return displayDatatypeFunction(lexicalTerm, optionalDatatype.get(), prefixManager);
} else if (lexicalTerm instanceof ImmutableFunctionalTerm) {
ImmutableFunctionalTerm lexicalFunctionalTerm = (ImmutableFunctionalTerm) lexicalTerm;
FunctionSymbol lexicalFunctionSymbol = lexicalFunctionalTerm.getFunctionSymbol();
if (lexicalFunctionSymbol instanceof IRIStringTemplateFunctionSymbol)
return displayURITemplate(lexicalFunctionalTerm, prefixManager);
else if (lexicalFunctionSymbol instanceof BnodeStringTemplateFunctionSymbol)
return displayFunctionalBnode(lexicalFunctionalTerm);
}
throw new IllegalArgumentException("unsupported function " + function);
} else if (functionSymbol instanceof DBConcatFunctionSymbol) {
>>>>>>>
FunctionSymbol functionSymbol = function.getFunctionSymbol();
if (functionSymbol instanceof RDFTermFunctionSymbol) {
ImmutableTerm lexicalTerm = function.getTerm(0);
Optional<RDFDatatype> optionalDatatype = function.inferType()
.flatMap(TermTypeInference::getTermType)
.filter(t -> t instanceof RDFDatatype)
.map(t -> (RDFDatatype) t);
if (optionalDatatype.isPresent()) {
return displayDatatypeFunction(lexicalTerm, optionalDatatype.get(), prefixManager);
} else if (lexicalTerm instanceof ImmutableFunctionalTerm) {
ImmutableFunctionalTerm lexicalFunctionalTerm = (ImmutableFunctionalTerm) lexicalTerm;
FunctionSymbol lexicalFunctionSymbol = lexicalFunctionalTerm.getFunctionSymbol();
if (lexicalFunctionSymbol instanceof IRIStringTemplateFunctionSymbol)
return displayURITemplate(lexicalFunctionalTerm, prefixManager);
if (lexicalFunctionSymbol instanceof BnodeStringTemplateFunctionSymbol)
return displayFunctionalBnode(lexicalFunctionalTerm);
}
throw new IllegalArgumentException("unsupported function " + function);
} else if (functionSymbol instanceof DBConcatFunctionSymbol) {
<<<<<<<
return displayOrdinaryFunction(function, functionSymbol.getName(), prefixManager);
=======
} else if (functionSymbol instanceof DBTypeConversionFunctionSymbol) {
return displayVariable((Variable) function.getTerm(0));
} else {
return displayOrdinaryFunction(function, functionSymbol.getName(), prefixManager);
}
// return null; // TODO: why do we need a return here??
>>>>>>>
return displayOrdinaryFunction(function, functionSymbol.getName(), prefixManager);
<<<<<<<
private static String displayDatatypeFunction(ImmutableTerm lexicalTerm, RDFDatatype datatype, PrefixManager prefixManager) {
String lexicalString = getDisplayName(lexicalTerm, prefixManager);
return datatype.getLanguageTag()
.map(tag -> lexicalString + "@" + tag.getFullString())
.orElseGet(() -> lexicalString + "^^"
+ getAbbreviatedName(datatype.getIRI().getIRIString(), prefixManager, false));
=======
private static String displayDatatypeFunction(ImmutableTerm lexicalTerm, RDFDatatype datatype, PrefixManager prefixManager) {
final String lexicalString = getDisplayName(lexicalTerm, prefixManager);
return datatype.getLanguageTag()
.map(tag -> lexicalString + "@" + tag.getFullString())
.orElseGet(() -> {
final String typePostfix = datatype.getIRI().equals(RDFS.LITERAL) ? "" : "^^"
+ getAbbreviatedName(datatype.getIRI().getIRIString(), prefixManager, false);
return lexicalString + typePostfix;
});
>>>>>>>
private static String displayDatatypeFunction(ImmutableTerm lexicalTerm, RDFDatatype datatype, PrefixManager prefixManager) {
final String lexicalString = getDisplayName(lexicalTerm, prefixManager);
return datatype.getLanguageTag()
.map(tag -> lexicalString + "@" + tag.getFullString())
.orElseGet(() -> lexicalString + "^^"
+ getAbbreviatedName(datatype.getIRI().getIRIString(), prefixManager, false));
<<<<<<<
terms.forEach(TargetQueryRenderer::appendTerms);
=======
for (ImmutableTerm term : terms) {
if (term instanceof Constant) {
String st = ((Constant) term).getValue();
if (st.contains("{")) {
st = st.replace("{", "\\{");
st = st.replace("}", "\\}");
}
sb.append(st);
} else {
sb.append("{").append(((Variable) term).getName()).append("}");
}
}
>>>>>>>
for (ImmutableTerm term : terms) {
if (term instanceof Constant) {
String st = ((Constant) term).getValue();
if (st.contains("{")) {
st = st.replace("{", "\\{");
st = st.replace("}", "\\}");
}
sb.append(st);
} else {
sb.append("{").append(((Variable) term).getName()).append("}");
}
} |
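// A standalone sketch of the literal-rendering rule resolved above: a language-tagged
// literal prints as lexical@tag, any other datatype as lexical^^abbreviated-IRI.
// The prefix map and abbreviate() are hypothetical stand-ins for getAbbreviatedName.
import java.util.Map;
import java.util.Optional;

public class DatatypeRenderSketch {
    static final Map<String, String> PREFIXES =
            Map.of("http://www.w3.org/2001/XMLSchema#", "xsd:");

    static String abbreviate(String iri) {
        return PREFIXES.entrySet().stream()
                .filter(e -> iri.startsWith(e.getKey()))
                .map(e -> e.getValue() + iri.substring(e.getKey().length()))
                .findFirst()
                .orElse("<" + iri + ">");
    }

    static String render(String lexical, Optional<String> languageTag, String datatypeIri) {
        return languageTag
                .map(tag -> lexical + "@" + tag)
                .orElseGet(() -> lexical + "^^" + abbreviate(datatypeIri));
    }

    public static void main(String[] args) {
        System.out.println(render("\"cat\"", Optional.of("en"), null));  // "cat"@en
        System.out.println(render("\"42\"", Optional.empty(),
                "http://www.w3.org/2001/XMLSchema#integer"));            // "42"^^xsd:integer
    }
}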
<<<<<<<
.propertyFile(propertyFile)
=======
.enableTestMode()
>>>>>>>
.propertyFile(propertyFile)
.enableTestMode() |
<<<<<<<
* Copyright (C) 2009-2013, Free University of Bozen Bolzano This source code is
* available under the terms of the Affero General Public License v3.
=======
* #%L
* ontop-reformulation-core
* %%
* Copyright (C) 2009 - 2013 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
>>>>>>>
* Copyright (C) 2009-2013, Free University of Bozen Bolzano This source code is
* available under the terms of the Affero General Public License v3.
* #%L
* ontop-reformulation-core
* %%
* Copyright (C) 2009 - 2013 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at |
<<<<<<<
=======
private final JdbcTypeMapper jdbcTypeMapper;
private final RDF rdfFactory;
>>>>>>>
private final RDF rdfFactory;
<<<<<<<
TargetAtomFactory targetAtomFactory, SubstitutionFactory substitutionFactory) {
=======
TargetAtomFactory targetAtomFactory, SubstitutionFactory substitutionFactory,
JdbcTypeMapper jdbcTypeMapper, RDF rdfFactory) {
>>>>>>>
TargetAtomFactory targetAtomFactory, SubstitutionFactory substitutionFactory,
RDF rdfFactory) {
<<<<<<<
=======
this.jdbcTypeMapper = jdbcTypeMapper;
this.rdfFactory = rdfFactory;
>>>>>>>
this.rdfFactory = rdfFactory; |
<<<<<<<
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
=======
import org.apache.commons.rdf.api.RDF;
>>>>>>>
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import org.apache.commons.rdf.api.RDF;
<<<<<<<
private final SubstitutionFactory substitutionFactory;
=======
private final RDF rdfFactory;
>>>>>>>
private final RDF rdfFactory;
private final SubstitutionFactory substitutionFactory;
<<<<<<<
SubstitutionFactory substitutionFactory, OntopSystemSQLSettings settings) {
=======
RDF rdfFactory, OntopSystemSQLSettings settings) {
>>>>>>>
RDF rdfFactory, SubstitutionFactory substitutionFactory,
OntopSystemSQLSettings settings) {
<<<<<<<
this.substitutionFactory = substitutionFactory;
=======
this.rdfFactory = rdfFactory;
>>>>>>>
this.rdfFactory = rdfFactory;
this.substitutionFactory = substitutionFactory;
<<<<<<<
? new DistinctJDBCSolutionMappingSet(set, SQLSignature, constructionNode, termFactory, substitutionFactory)
: new JDBCSolutionMappingSet(set, SQLSignature, constructionNode, termFactory, substitutionFactory);
=======
? new SQLDistinctTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory)
: new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata, iriDictionary,
termFactory, typeFactory, rdfFactory);
>>>>>>>
? new DistinctJDBCSolutionMappingSet(set, SQLSignature, constructionNode, termFactory, substitutionFactory)
: new JDBCSolutionMappingSet(set, SQLSignature, constructionNode, termFactory, substitutionFactory);
<<<<<<<
ResultSet rs = sqlStatement.executeQuery(sqlQuery);
tuples = new JDBCSolutionMappingSet(rs, SQLSignature, constructionNode,
termFactory, substitutionFactory );
=======
ResultSet set = sqlStatement.executeQuery(sqlQuery);
tuples = new DelegatedIriSQLTupleResultSet(set, signature, constructionNode, dbMetadata,
iriDictionary, termFactory, typeFactory, rdfFactory);
>>>>>>>
ResultSet rs = sqlStatement.executeQuery(sqlQuery);
tuples = new JDBCSolutionMappingSet(rs, SQLSignature, constructionNode,
termFactory, substitutionFactory ); |
<<<<<<<
import it.unibz.inf.ontop.model.type.COL_TYPE;
import it.unibz.inf.ontop.model.term.TermConstants;
import it.unibz.inf.ontop.utils.UriTemplateMatcher;
import it.unibz.inf.ontop.model.term.Constant;
import it.unibz.inf.ontop.model.term.Function;
import it.unibz.inf.ontop.model.term.Term;
import it.unibz.inf.ontop.model.term.Variable;
=======
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate.COL_TYPE;
>>>>>>>
import it.unibz.inf.ontop.model.type.COL_TYPE; |
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.core.optimization.IntermediateQueryOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.TopDownSubstitutionLiftOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.QueryUnfolder;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.impl.QueryUnfolderImpl;
=======
import it.unibz.inf.ontop.owlrefplatform.core.optimization.BasicLeftJoinOptimizer;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.optimization.BasicLeftJoinOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.IntermediateQueryOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.TopDownSubstitutionLiftOptimizer;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.QueryUnfolder;
import it.unibz.inf.ontop.owlrefplatform.core.optimization.unfolding.impl.QueryUnfolderImpl;
<<<<<<<
=======
BasicLeftJoinOptimizer leftJoinOptimizer = new BasicLeftJoinOptimizer();
intermediateQuery = leftJoinOptimizer.optimize(intermediateQuery);
log.debug("New query after left join optimization: \n" + intermediateQuery.toString());
BasicJoinOptimizer joinOptimizer = new BasicJoinOptimizer();
intermediateQuery = joinOptimizer.optimize(intermediateQuery);
log.debug("New query after join optimization: \n" + intermediateQuery.toString());
>>>>>>> |
<<<<<<<
private static final String DIALECT_SERIALIZER_SUFFIX = "-serializer";
private static final String DIALECT_NORMALIZER_SUFFIX = "-normalizer";
=======
private static final String DB_MP_FACTORY_SUFFIX = "-metadataProvider";
>>>>>>>
private static final String DIALECT_SERIALIZER_SUFFIX = "-serializer";
private static final String DIALECT_NORMALIZER_SUFFIX = "-normalizer";
private static final String DB_MP_FACTORY_SUFFIX = "-metadataProvider";
<<<<<<<
/*
* Dialect serializer
*/
String serializerKey = jdbcDriver + DIALECT_SERIALIZER_SUFFIX;
String serializerName = SelectFromWhereSerializer.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(serializerKey))
.filter(v -> !userProperties.containsKey(serializerName))
.ifPresent(v -> properties.setProperty(serializerName, v));
/*
* Dialect normalizer
*/
String normalizerKey = jdbcDriver + DIALECT_NORMALIZER_SUFFIX;
String normalizerName = DialectExtraNormalizer.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(normalizerKey))
.filter(v -> !userProperties.containsKey(normalizerName))
.ifPresent(v -> properties.setProperty(normalizerName, v));
=======
/*
* DB metadata provider
*/
String dbMPFactoryKey = jdbcDriver + DB_MP_FACTORY_SUFFIX;
String dbMPFactoryName = DBMetadataProvider.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(dbMPFactoryKey))
// Must NOT override user properties
.filter(v -> !userProperties.containsKey(dbMPFactoryName))
.ifPresent(v -> properties.setProperty(dbMPFactoryName, v));
>>>>>>>
/*
* Dialect serializer
*/
String serializerKey = jdbcDriver + DIALECT_SERIALIZER_SUFFIX;
String serializerName = SelectFromWhereSerializer.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(serializerKey))
.filter(v -> !userProperties.containsKey(serializerName))
.ifPresent(v -> properties.setProperty(serializerName, v));
/*
* Dialect normalizer
*/
String normalizerKey = jdbcDriver + DIALECT_NORMALIZER_SUFFIX;
String normalizerName = DialectExtraNormalizer.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(normalizerKey))
.filter(v -> !userProperties.containsKey(normalizerName))
.ifPresent(v -> properties.setProperty(normalizerName, v));
/*
* DB metadata provider
*/
String dbMPFactoryKey = jdbcDriver + DB_MP_FACTORY_SUFFIX;
String dbMPFactoryName = DBMetadataProvider.class.getCanonicalName();
Optional.ofNullable(properties.getProperty(dbMPFactoryKey))
// Must NOT override user properties
.filter(v -> !userProperties.containsKey(dbMPFactoryName))
.ifPresent(v -> properties.setProperty(dbMPFactoryName, v)); |
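// A standalone sketch of the per-driver defaulting pattern kept in the resolution above:
// a shipped default, keyed by "<jdbcDriver><suffix>", is copied to the canonical key only
// when the user has not set that key explicitly. All names below are hypothetical.
import java.util.Optional;
import java.util.Properties;

public class DriverDefaultSketch {
    public static void main(String[] args) {
        Properties properties = new Properties();      // effective configuration + shipped defaults
        properties.setProperty("org.h2.Driver-serializer", "com.example.H2Serializer");
        Properties userProperties = new Properties();  // what the user explicitly set

        String jdbcDriver = "org.h2.Driver";
        String serializerKey = jdbcDriver + "-serializer";
        String canonicalKey = "com.example.SelectFromWhereSerializer";
        Optional.ofNullable(properties.getProperty(serializerKey))
                // must NOT override user properties
                .filter(v -> !userProperties.containsKey(canonicalKey))
                .ifPresent(v -> properties.setProperty(canonicalKey, v));

        System.out.println(properties.getProperty(canonicalKey)); // com.example.H2Serializer
    }
}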
<<<<<<<
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import it.unibz.inf.ontop.exception.DuplicateMappingException;
import it.unibz.inf.ontop.exception.InvalidMappingException;
import it.unibz.inf.ontop.injection.OBDAProperties;
import it.unibz.inf.ontop.io.InvalidDataSourceException;
import it.unibz.inf.ontop.mapping.MappingParser;
import it.unibz.inf.ontop.model.CQIE;
import it.unibz.inf.ontop.model.Predicate;
import it.unibz.inf.ontop.ontology.DataPropertyExpression;
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.ontology.DataPropertyExpression;
>>>>>>>
import it.unibz.inf.ontop.injection.OBDAProperties;
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.ontology.DataPropertyExpression;
<<<<<<<
=======
import java.io.File;
import java.net.URI;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
>>>>>>>
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
<<<<<<<
private OWLOntology ontology;
=======
private OBDAModel obdaModel;
private OWLOntology owlOntology;
>>>>>>>
private OWLOntology owlOntology;
<<<<<<<
loadOBDA(p);
loadR2rml(p);
=======
String jdbcurl = "jdbc:mysql://10.7.20.39/npd";
String username = "fish";
String password = "fish";
String driverclass = "com.mysql.jdbc.Driver";
OBDADataFactory f = OBDADataFactoryImpl.getInstance();
// String sourceUrl = "http://example.org/customOBDA";
URI obdaURI = new File(r2rmlfile).toURI();
String sourceUrl = obdaURI.toString();
OBDADataSource dataSource = f.getJDBCDataSource(sourceUrl, jdbcurl,
username, password, driverclass);
loadR2rml(p, dataSource);
loadOBDA(p);
>>>>>>>
loadOBDA(p);
loadR2rml(p);
<<<<<<<
for (CQIE q : reasonerOBDA.getQuestInstance().getQuestUnfolder().getRules()) {
if (!reasonerR2rml.getQuestInstance().getQuestUnfolder().getRules().contains(q))
=======
for (CQIE q : reasonerOBDA.getQuestInstance().getUnfolderRules()) {
if (!reasonerR2rml.getQuestInstance().getUnfolderRules().contains(q))
>>>>>>>
for (CQIE q : reasonerOBDA.getQuestInstance().getUnfolderRules()) {
if (!reasonerR2rml.getQuestInstance().getUnfolderRules().contains(q))
<<<<<<<
for (CQIE q : reasonerR2rml.getQuestInstance().getQuestUnfolder().getRules()) {
if (!reasonerOBDA.getQuestInstance().getQuestUnfolder().getRules().contains(q))
=======
for (CQIE q : reasonerR2rml.getQuestInstance().getUnfolderRules()) {
if (!reasonerOBDA.getQuestInstance().getUnfolderRules().contains(q))
>>>>>>>
for (CQIE q : reasonerR2rml.getQuestInstance().getUnfolderRules()) {
if (!reasonerOBDA.getQuestInstance().getUnfolderRules().contains(q))
<<<<<<<
// Make sure the R2RML parser will be used.
p.put(MappingParser.class.getCanonicalName(), R2RMLMappingParser.class.getCanonicalName());
// Creating a new instance of the reasoner
QuestOWLFactory factory = new QuestOWLFactory(new File(r2rmlfile), new QuestPreferences(p));
reasonerR2rml = factory.createReasoner(ontology, new SimpleConfiguration());
=======
R2RMLReader reader = null;
reader = new R2RMLReader(r2rmlfile);
obdaModel = reader.readModel(dataSource);
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
reasonerR2rml = factory.createReasoner(owlOntology, config);
>>>>>>>
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.nativeOntopMappingFile(new File(r2rmlfile))
.preferences(new R2RMLQuestPreferences(p))
.build();
reasonerR2rml = factory.createReasoner(owlOntology, config);
<<<<<<<
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(p));
reasonerOBDA = factory.createReasoner(ontology, new SimpleConfiguration());
=======
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
reasonerOBDA = factory.createReasoner(owlOntology, config);
>>>>>>>
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.nativeOntopMappingFile(new File(obdafile))
.preferences(new QuestPreferences(p))
.build();
reasonerOBDA = factory.createReasoner(owlOntology, config); |
<<<<<<<
import it.unibz.inf.ontop.model.atom.AtomFactory;
=======
import it.unibz.inf.ontop.iq.optimizer.BindingLiftOptimizer;
>>>>>>>
import it.unibz.inf.ontop.iq.optimizer.BindingLiftOptimizer;
import it.unibz.inf.ontop.model.atom.AtomFactory;
<<<<<<<
private UnionBasedQueryMergerImpl(IntermediateQueryFactory iqFactory, FixedPointBindingLiftOptimizer bindingLifter,
ConstructionNodeCleaner constructionNodeCleaner, FlattenUnionOptimizer unionFlattener,
SubstitutionFactory substitutionFactory, AtomFactory atomFactory, TermFactory termFactory) {
=======
private UnionBasedQueryMergerImpl(IntermediateQueryFactory iqFactory, BindingLiftOptimizer bindingLifter,
ConstructionNodeCleaner constructionNodeCleaner,
FlattenUnionOptimizer unionFlattener) {
>>>>>>>
private UnionBasedQueryMergerImpl(IntermediateQueryFactory iqFactory, BindingLiftOptimizer bindingLifter,
ConstructionNodeCleaner constructionNodeCleaner,
FlattenUnionOptimizer unionFlattener, SubstitutionFactory substitutionFactory,
AtomFactory atomFactory, TermFactory termFactory) { |
<<<<<<<
=======
import it.unibz.inf.ontop.sql.ImplicitDBConstraintsReader;
import org.openrdf.model.Model;
import org.openrdf.repository.RepositoryException;
import org.semanticweb.owlapi.model.OWLOntology;
>>>>>>>
import org.openrdf.model.Model;
import org.openrdf.repository.RepositoryException;
import org.semanticweb.owlapi.model.OWLOntology;
<<<<<<<
=======
/**
* Sets the implicit db constraints, i.e. primary and foreign keys not in the database
* Must be called before the call to initialize
*
* @param userConstraints
*/
public void setImplicitDBConstraints(ImplicitDBConstraintsReader userConstraints){
if(userConstraints == null)
throw new NullPointerException();
if(this.initialized)
throw new Error("Implicit DB Constraints must be given before the call to initialize to have effect. See https://github.com/ontop/ontop/wiki/Implicit-database-constraints and https://github.com/ontop/ontop/wiki/API-change-in-SesameVirtualRepo-and-QuestDBVirtualStore");
this.virtualStore.setImplicitDBConstraints(userConstraints);
}
>>>>>>>
<<<<<<<
private void createRepo(String name, String tboxFile, String mappingFile, QuestPreferences pref) throws Exception
=======
private void createRepo(String name, String tboxFile, String mappingFile, QuestPreferences pref, ImplicitDBConstraintsReader userConstraints) throws Exception
>>>>>>>
private void createRepo(String name, String tboxFile, String mappingFile, QuestPreferences pref) throws Exception |
<<<<<<<
import it.unibz.inf.ontop.ontology.Ontology;
import it.unibz.inf.ontop.ontology.OntologyFactory;
import it.unibz.inf.ontop.ontology.impl.OntologyFactoryImpl;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.core.abox.RDBMSSIRepositoryManager;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLFactory;
=======
>>>>>>>
<<<<<<<
=======
// Loading the OBDA data
obdaModel = fac.getOBDAModel();
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load(new File(obdafile));
>>>>>>>
<<<<<<<
Properties p = new Properties();
p.put(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
p.put(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
p.put(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
p.put(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.put(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
p.put(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");
p.put(QuestPreferences.STORAGE_LOCATION, QuestConstants.INMEMORY);
=======
QuestPreferences p = new QuestPreferences();
p.setCurrentValueOf(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
p.setCurrentValueOf(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.setCurrentValueOf(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
p.setCurrentValueOf(QuestPreferences.STORAGE_LOCATION, QuestConstants.INMEMORY);
>>>>>>>
Properties p = new Properties();
p.put(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
p.put(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
p.put(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
p.put(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
p.put(QuestPreferences.OBTAIN_FROM_ONTOLOGY, "true");
p.put(QuestPreferences.STORAGE_LOCATION, QuestConstants.INMEMORY);
<<<<<<<
Ontology ont = ofac.createOntology();
ont.getVocabulary().createClass("A");
ont.getVocabulary().createObjectProperty("P");
ont.getVocabulary().createDataProperty("P");
ont.getVocabulary().createObjectProperty("Q");
ont.getVocabulary().createDataProperty("D");
QuestOWLFactory factory = new QuestOWLFactory(new QuestPreferences(p));
QuestOWL reasoner = factory.createReasoner(ontology);
=======
OntologyVocabulary vb = ofac.createVocabulary();
vb.createClass("A");
vb.createObjectProperty("P");
vb.createDataProperty("P");
vb.createObjectProperty("Q");
vb.createDataProperty("D");
Ontology ont = ofac.createOntology(vb);
Quest quest = new Quest(ont, p);
quest.setupRepository();
>>>>>>>
OntologyVocabulary vb = ofac.createVocabulary();
vb.createClass("A");
vb.createObjectProperty("P");
vb.createDataProperty("P");
vb.createObjectProperty("Q");
vb.createDataProperty("D");
Ontology ont = ofac.createOntology(vb);
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.properties(p)
.build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
Quest quest = new Quest(ont, p);
quest.setupRepository(); |
<<<<<<<
import java.io.File;
import java.net.URI;
import java.sql.Connection;
import java.util.Properties;
import com.google.inject.Guice;
import com.google.inject.Injector;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.injection.OBDACoreModule;
import it.unibz.inf.ontop.injection.OBDAProperties;
=======
>>>>>>>
import com.google.inject.Guice;
import com.google.inject.Injector;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.injection.OBDACoreModule;
import it.unibz.inf.ontop.injection.OBDAProperties;
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.owlapi3.*;
=======
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.owlapi.*; |
<<<<<<<
SubstitutionFactory substitutionFactory,
=======
RDF rdfFactory,
>>>>>>>
SubstitutionFactory substitutionFactory,
RDF rdfFactory,
<<<<<<<
dbMetadata, inputQueryFactory, termFactory, typeFactory, substitutionFactory, settings);
=======
dbMetadata, inputQueryFactory, termFactory, typeFactory, rdfFactory, settings);
>>>>>>>
dbMetadata, inputQueryFactory, termFactory, typeFactory, rdfFactory, substitutionFactory, settings); |
<<<<<<<
typeFactory, datalogFactory, targetAtomFactory, substitutionFactory);
=======
typeFactory, datalogFactory, targetAtomFactory, substitutionFactory, jdbcTypeMapper, rdfFactory);
>>>>>>>
typeFactory, datalogFactory, targetAtomFactory, substitutionFactory, rdfFactory); |
<<<<<<<
import java.io.File;
import java.net.URI;
import java.util.Iterator;
import java.util.Set;
import it.unibz.inf.ontop.model.*;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.ontology.Assertion;
import it.unibz.inf.ontop.owlapi.OWLAPIABoxIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.NTripleAssertionIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.QuestMaterializer;
>>>>>>>
import java.io.File;
import java.net.URI;
import java.util.Iterator;
import java.util.Set;
import it.unibz.inf.ontop.model.*;
import org.openrdf.query.MalformedQueryException;
import org.openrdf.query.QueryLanguage;
<<<<<<<
import org.openrdf.query.parser.QueryParser;
import org.openrdf.query.parser.QueryParserUtil;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.mapping.MappingParser;
import it.unibz.inf.ontop.ontology.Assertion;
import it.unibz.inf.ontop.owlapi3.OWLAPI3ABoxIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.EquivalentTriplePredicateIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.NTripleAssertionIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.QuestMaterializer;
=======
>>>>>>>
import org.openrdf.query.parser.QueryParser;
import org.openrdf.query.parser.QueryParserUtil;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.mapping.MappingParser;
import it.unibz.inf.ontop.ontology.Assertion;
import it.unibz.inf.ontop.owlapi3.OWLAPI3ABoxIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.EquivalentTriplePredicateIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.NTripleAssertionIterator;
import it.unibz.inf.ontop.owlrefplatform.core.abox.QuestMaterializer;
<<<<<<<
/**
* Implementation of QuestDBStatement.
*
* TODO: rename it QuestDBStatementImpl.
*/
// DISABLED TEMPORARILY FOR MERGING PURPOSES (NOT BREAKING CLIENTS WITH this ugly name IQquestOWLStatement)
//public class QuestDBStatement implements IQuestDBStatement {
=======
import java.net.URI;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Set;
>>>>>>>
/**
* Implementation of QuestDBStatement.
*
* TODO: rename it QuestDBStatementImpl.
*/
// DISABLED TEMPORARILY FOR MERGING PURPOSES (NOT BREAKING CLIENTS WITH this ugly name IQquestOWLStatement)
//public class QuestDBStatement implements IQuestDBStatement {
<<<<<<<
IQuest questInstance = st.getQuestInstance();
=======
>>>>>>>
<<<<<<<
EquivalentTriplePredicateIterator aBoxNormalIter =
new EquivalentTriplePredicateIterator(new OWLAPI3ABoxIterator(ontos),
questInstance.getReasoner());
result = st.insertData(aBoxNormalIter, /*useFile,*/ commit, batch);
=======
OWLAPIABoxIterator aBoxIter = new OWLAPIABoxIterator(ontos, st.questInstance.getVocabulary());
result = st.insertData(aBoxIter, /*useFile,*/ commit, batch);
>>>>>>>
OWLAPIABoxIterator aBoxIter = new OWLAPIABoxIterator(ontos, st.questInstance.getVocabulary());
result = st.insertData(aBoxIter, /*useFile,*/ commit, batch);
<<<<<<<
@Deprecated
public String getSQL(String query) throws OBDAException {
return ((SQLExecutableQuery)st.unfoldAndGenerateTargetQuery(query)).getSQL();
}
public ExecutableQuery getTargetQuery(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query);
=======
public String getSQL(String query) throws Exception {
ParsedQuery pq = st.questInstance.getEngine().getParsedQuery(query);
return st.questInstance.getEngine().getSQL(pq);
>>>>>>>
@Deprecated
public String getSQL(String query) throws OBDAException {
//ParsedQuery pq = st.questInstance.getEngine().getParsedQuery(query);
//return st.questInstance.getEngine().translateIntoNativeQuery(pq);
return ((SQLExecutableQuery)st.unfoldAndGenerateTargetQuery(query)).getSQL();
}
public ExecutableQuery getTargetQuery(String query) throws OBDAException {
return st.unfoldAndGenerateTargetQuery(query);
<<<<<<<
@Override
public int getTupleCount(String query) throws OBDAException {
return st.getTupleCount(query);
}
public String getRewriting(String query) throws OBDAException {
QueryParser qp = QueryParserUtil.createParser(QueryLanguage.SPARQL);
ParsedQuery pq = null; // base URI is null
try {
pq = qp.parseQuery(query, null);
} catch (MalformedQueryException e) {
throw new OBDAException(e);
}
// SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(this.st.getQuestInstance().getUriTemplateMatcher());
//ImmutableList<String> signatureContainer = tr.getSignature(pq);
return st.getRewriting(pq/*, signatureContainer*/);
=======
public String getRewriting(String query) throws Exception {
ParsedQuery pq = st.questInstance.getEngine().getParsedQuery(query);
return st.questInstance.getEngine().getRewriting(pq);
>>>>>>>
@Override
public int getTupleCount(String query) throws OBDAException {
return st.getTupleCount(query);
}
public String getRewriting(String query) throws Exception {
ParsedQuery pq = st.questInstance.getEngine().getParsedQuery(query);
return st.getRewriting(pq); |
<<<<<<<
// DISABLED TEMPORARILY FOR MERGING PURPOSES (NOT BREAKING CLIENTS WITH this ugly name IQquestOWLStatement)
//public class QuestOWLStatement implements IQuestOWLStatement {
public class QuestOWLStatement {
private IQuestStatement st;
private QuestOWLConnection conn;
public QuestOWLStatement(IQuestStatement st, QuestOWLConnection conn) {
=======
public class QuestOWLStatement implements AutoCloseable {
private final QuestStatement st;
private final QuestOWLConnection conn;
protected QuestOWLStatement(QuestStatement st, QuestOWLConnection conn) {
>>>>>>>
// DISABLED TEMPORARILY FOR MERGING PURPOSES (NOT BREAKING CLIENTS WITH this ugly name IQquestOWLStatement)
//public class QuestOWLStatement implements IQuestOWLStatement {
public class QuestOWLStatement implements AutoCloseable {
private IQuestStatement st;
private QuestOWLConnection conn;
public QuestOWLStatement(IQuestStatement st, QuestOWLConnection conn) {
<<<<<<<
IQuest questInstance = st.getQuestInstance();
SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(questInstance.getUriTemplateMatcher());
ImmutableList<String> signatureContainer = tr.getSignature(pq);
return st.getRewriting(pq, signatureContainer);
} catch (Exception e) {
=======
//SparqlAlgebraToDatalogTranslator tr = st.questInstance.getSparqlAlgebraToDatalogTranslator();
//List<String> signatureContainer = tr.getSignature(pq);
return st.getRewriting(pq/*, signatureContainer*/);
}
catch (Exception e) {
>>>>>>>
IQuest questInstance = st.getQuestInstance();
//SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(questInstance.getUriTemplateMatcher());
//ImmutableList<String> signatureContainer = tr.getSignature(pq);
return st.getRewriting(pq/*, signatureContainer*/);
} catch (Exception e) {
<<<<<<<
=======
>>>>>>> |
<<<<<<<
import com.google.common.collect.ImmutableList;
import it.unibz.inf.ontop.model.atom.TargetAtom;
import it.unibz.inf.ontop.model.atom.TargetAtomFactory;
=======
import it.unibz.inf.ontop.model.atom.AtomFactory;
import com.google.common.collect.ImmutableList;
>>>>>>>
import com.google.common.collect.ImmutableList;
import it.unibz.inf.ontop.model.atom.TargetAtom;
import it.unibz.inf.ontop.model.atom.TargetAtomFactory;
<<<<<<<
protected ImmutableTerm construct(String text) {
ImmutableTerm toReturn = null;
=======
protected Term constructIRI(String text) {
Term toReturn = null;
>>>>>>>
protected ImmutableTerm constructIRI(String text) {
ImmutableTerm toReturn = null;
<<<<<<<
List<ImmutableTerm> terms = new LinkedList<>();
List<FormatString> tokens = parse(text);
=======
List<Term> terms = new LinkedList<>();
List<FormatString> tokens = parseIRI(text);
>>>>>>>
List<ImmutableTerm> terms = new LinkedList<>();
List<FormatString> tokens = parseIRI(text); |
<<<<<<<
}
else if (function instanceof StringOperationPredicate) {
// Functions returning string values
mainColumn = getSQLString(ov, index, false);
}
=======
}
else if (function.isArithmeticPredicate()){
// For numerical operators, e.g., MULTIPLY, SUBTRACT, ADDITION
String expressionFormat = getNumericalOperatorString(function);
Term left = ov.getTerm(0);
Term right = ov.getTerm(1);
String leftOp = getSQLString(left, index, true);
String rightOp = getSQLString(right, index, true);
mainColumn = String.format("(" + expressionFormat + ")", leftOp, rightOp);
}
>>>>>>>
}
else if (function instanceof StringOperationPredicate) {
// Functions returning string values
mainColumn = getSQLString(ov, index, false);
}
else if (function.isArithmeticPredicate()){
// For numerical operators, e.g., MULTIPLY, SUBTRACT, ADDITION
String expressionFormat = getNumericalOperatorString(function);
Term left = ov.getTerm(0);
Term right = ov.getTerm(1);
String leftOp = getSQLString(left, index, true);
String rightOp = getSQLString(right, index, true);
mainColumn = String.format("(" + expressionFormat + ")", leftOp, rightOp);
}
<<<<<<<
else if (function instanceof StringOperationPredicate) {
type = COL_TYPE.LITERAL;
}
=======
else if (ov.isArithmeticFunction()) {
type = COL_TYPE.LITERAL;
}
>>>>>>>
else if (function instanceof StringOperationPredicate) {
type = COL_TYPE.LITERAL;
}
else if (ov.isArithmeticFunction()) {
type = COL_TYPE.LITERAL;
} |
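// A standalone sketch of how the resolution renders arithmetic predicates: the operator
// template (here "%s * %s", as getNumericalOperatorString might return for MULTIPLY) is
// filled with the two operands' SQL and wrapped in parentheses. Column names are hypothetical.
public class NumericOperatorSketch {
    public static void main(String[] args) {
        String expressionFormat = "%s * %s";
        String leftOp = "t1.price";
        String rightOp = "t2.rate";
        String mainColumn = String.format("(" + expressionFormat + ")", leftOp, rightOp);
        System.out.println(mainColumn); // (t1.price * t2.rate)
    }
}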
<<<<<<<
mapper = jsonMapperBuilder()
.withCoercionConfigDefaults(h ->
h.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.TryConvert))
.build();
assertNull(_verifyFromEmptyPass(mapper));
=======
mapper = newJsonMapper();
mapper.coercionConfigDefaults().setCoercion(shape,
CoercionAction.TryConvert);
assertNull(_verifyFromEmptyPass(mapper, JSON_EMPTY));
>>>>>>>
mapper = jsonMapperBuilder()
.withCoercionConfigDefaults(h ->
h.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.TryConvert))
.build();
assertNull(_verifyFromEmptyPass(mapper, JSON_EMPTY));
<<<<<<<
mapper = jsonMapperBuilder()
.withCoercionConfigDefaults(h -> h.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsNull))
.withCoercionConfig(LogicalType.POJO,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail))
.build();
_verifyFromEmptyFail(mapper);
=======
mapper = newJsonMapper();
mapper.coercionConfigDefaults().setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsNull);
mapper.coercionConfigFor(LogicalType.POJO).setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail);
_verifyFromEmptyFail(mapper, quote(""));
>>>>>>>
mapper = jsonMapperBuilder()
.withCoercionConfigDefaults(h -> h.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsNull))
.withCoercionConfig(LogicalType.POJO,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail))
.build();
_verifyFromEmptyFail(mapper, JSON_EMPTY);
<<<<<<<
mapper = jsonMapperBuilder()
.withCoercionConfig(LogicalType.POJO,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsEmpty))
.withCoercionConfig(Bean.class,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail))
.build();
_verifyFromEmptyFail(mapper);
=======
mapper = newJsonMapper();
mapper.coercionConfigFor(LogicalType.POJO).setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsEmpty);
mapper.coercionConfigFor(Bean.class).setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail);
_verifyFromEmptyFail(mapper, quote(""));
>>>>>>>
mapper = jsonMapperBuilder()
.withCoercionConfig(LogicalType.POJO,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.AsEmpty))
.withCoercionConfig(Bean.class,
cfg -> cfg.setCoercion(CoercionInputShape.EmptyString,
CoercionAction.Fail))
.build();
_verifyFromEmptyFail(mapper, JSON_EMPTY); |
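// A standalone sketch of the builder-style coercion setup used in the resolved tests:
// empty JSON strings are coerced to null. Package names follow the 2.12+ line;
// jsonMapperBuilder() in the tests is assumed to wrap JsonMapper.builder(), and
// withCoercionConfigDefaults is assumed available on the builder (the 3.x API shown above).
import com.fasterxml.jackson.databind.cfg.CoercionAction;
import com.fasterxml.jackson.databind.cfg.CoercionInputShape;
import com.fasterxml.jackson.databind.json.JsonMapper;

public class CoercionConfigSketch {
    static class Bean { public String name; }

    public static void main(String[] args) throws Exception {
        JsonMapper mapper = JsonMapper.builder()
                .withCoercionConfigDefaults(cfg ->
                        cfg.setCoercion(CoercionInputShape.EmptyString, CoercionAction.AsNull))
                .build();
        Bean b = mapper.readValue("\"\"", Bean.class); // "" now reads as null, not an error
        System.out.println(b); // null
    }
}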
<<<<<<<
import com.google.common.collect.Lists;
import it.unibz.krdb.config.tmappings.types.SimplePredicate;
=======
>>>>>>>
import com.google.common.collect.Lists;
import it.unibz.krdb.config.tmappings.types.SimplePredicate;
<<<<<<<
// //////////////////////////////////////////////////////////////////////////////////////
// Davide>
// T-Mappings Configuration
//
//
// //////////////////////////////////////////////////////////////////////////////////////
private List<SimplePredicate> excludeFromTMappings = Lists.newArrayList();
/* Used to signal whether to apply the user constraints above */
//private boolean applyExcludeFromTMappings = false;
=======
>>>>>>>
// //////////////////////////////////////////////////////////////////////////////////////
// Davide>
// T-Mappings Configuration
//
//
// //////////////////////////////////////////////////////////////////////////////////////
private List<SimplePredicate> excludeFromTMappings = Lists.newArrayList();
/* Used to signal whether to apply the user constraints above */
//private boolean applyExcludeFromTMappings = false;
<<<<<<<
//if( this.applyExcludeFromTMappings )
questInstance.setExcludeFromTMappings(this.excludeFromTMappings);
=======
>>>>>>>
//if( this.applyExcludeFromTMappings )
questInstance.setExcludeFromTMappings(this.excludeFromTMappings); |
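
The resolution keeps Davide's T-mappings exclusion hook on both sides: the `excludeFromTMappings` list collected in the configuration is later handed to the Quest instance unconditionally, since the `applyExcludeFromTMappings` guard stayed commented out. A hedged sketch of the wiring; the String-based `SimplePredicate` constructor and the `Quest` parameter type are assumptions, not confirmed signatures:

// Illustrative only: SimplePredicate's constructor shown here is a guess
// at the real API in it.unibz.krdb.config.tmappings.types.
import com.google.common.collect.Lists;
import it.unibz.krdb.config.tmappings.types.SimplePredicate;
import java.util.List;

class TMappingExclusionSketch {
    static void configure(Quest questInstance) {
        List<SimplePredicate> excludeFromTMappings = Lists.newArrayList();
        // Hypothetical predicate to exclude from T-mapping saturation:
        excludeFromTMappings.add(new SimplePredicate("http://example.org/ontology#LargeClass"));
        // Applied unconditionally, as in the resolution above.
        questInstance.setExcludeFromTMappings(excludeFromTMappings);
    }
}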
<<<<<<<
enum ConnClasses{
MYSQL("com.mysql.jdbc.JDBC4Connection"),
POSTGRES("org.postgresql.jdbc4.Jdbc4Connection"),
DB2("com.ibm.db2.jcc.DB2Connection", "com.ibm.db2.jcc.t4.b");
private final List<String> connClasses;
ConnClasses(String... connClasses){
this.connClasses = Lists.newArrayList(connClasses);
}
public static ConnClasses fromString(String connectionClassName) {
ConnClasses result = null;
if (connectionClassName != null) {
for (ConnClasses b : ConnClasses.values()) {
if(b.connClasses.indexOf(connectionClassName) != -1){
result = b;
}
// if (connectionClassName.equals(b.connClass)) {
// result = b;
// }
}
}
else
throw new IllegalArgumentException("No constant with text " + connectionClassName + " found");
return result;
}
@Override
public String toString(){
return this.connClasses.toString();
}
}
=======
>>>>>>>
enum ConnClasses{
MYSQL("com.mysql.jdbc.JDBC4Connection"),
POSTGRES("org.postgresql.jdbc4.Jdbc4Connection"),
DB2("com.ibm.db2.jcc.DB2Connection", "com.ibm.db2.jcc.t4.b");
private final List<String> connClasses;
ConnClasses(String... connClasses){
this.connClasses = Lists.newArrayList(connClasses);
}
public static ConnClasses fromString(String connectionClassName) {
ConnClasses result = null;
if (connectionClassName != null) {
for (ConnClasses b : ConnClasses.values()) {
if(b.connClasses.indexOf(connectionClassName) != -1){
result = b;
}
// if (connectionClassName.equals(b.connClass)) {
// result = b;
// }
}
}
else
throw new IllegalArgumentException("No constant with text " + connectionClassName + " found");
return result;
}
@Override
public String toString(){
return this.connClasses.toString();
}
} |
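
The enum kept above maps JDBC connection implementation class names to a database flavor so callers can branch per backend. A short usage sketch against the enum's own API; note the asymmetric contract visible in `fromString`: unknown non-null names return null, while a null name throws:

// Usage sketch for the ConnClasses lookup kept above; assumes the demo
// sits in the same package, since the enum is package-private.
import java.sql.Connection;

class ConnClassesDemo {
    static void describe(Connection conn) {
        // getClass().getName() yields e.g. "org.postgresql.jdbc4.Jdbc4Connection"
        ConnClasses flavor = ConnClasses.fromString(conn.getClass().getName());
        if (flavor == null) {
            // fromString returns null for unrecognized (non-null) class names
            System.out.println("Unrecognized driver class: " + conn.getClass().getName());
        } else {
            System.out.println("Detected backend: " + flavor);
        }
    }
}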
<<<<<<<
=======
import org.semanticweb.ontop.owlrefplatform.core.QuestStatement;
import org.semanticweb.ontop.owlrefplatform.core.abox.EquivalentTriplePredicateIterator;
>>>>>>>
import org.semanticweb.ontop.owlrefplatform.core.abox.EquivalentTriplePredicateIterator;
<<<<<<<
SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(
questInstance.getUriTemplateMatcher());
=======
SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(this.st.getQuestInstance().getUriTemplateMatcher());
>>>>>>>
SparqlAlgebraToDatalogTranslator tr = new SparqlAlgebraToDatalogTranslator(this.st.getQuestInstance().getUriTemplateMatcher()); |
<<<<<<<
import it.unibz.inf.ontop.iq.node.ConstructionNode;
import it.unibz.inf.ontop.iq.node.NativeNode;
import it.unibz.inf.ontop.iq.optimizer.GroundTermRemovalFromDataNodeReshaper;
import it.unibz.inf.ontop.iq.optimizer.PullOutVariableOptimizer;
=======
>>>>>>>
import it.unibz.inf.ontop.iq.node.ConstructionNode;
import it.unibz.inf.ontop.iq.node.NativeNode;
<<<<<<<
import it.unibz.inf.ontop.model.type.TermType;
import it.unibz.inf.ontop.model.type.TermTypeInference;
import it.unibz.inf.ontop.model.type.TypeFactory;
=======
import it.unibz.inf.ontop.model.type.*;
>>>>>>>
import it.unibz.inf.ontop.model.type.*;
<<<<<<<
TypeFactory typeFactory, TermFactory termFactory, IQConverter iqConverter,
IntermediateQueryFactory iqFactory, AtomFactory atomFactory,
UnionFlattener unionFlattener, ImmutabilityTools immutabilityTools,
PushDownBooleanExpressionOptimizer pushDownExpressionOptimizer,
PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer) {
=======
TypeFactory typeFactory, TermFactory termFactory, IntermediateQueryFactory iqFactory,
IQConverter iqConverter, UnionFlattener unionFlattener,
PushDownBooleanExpressionOptimizer pushDownExpressionOptimizer,
OptimizerFactory optimizerFactory, PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer) {
>>>>>>>
TypeFactory typeFactory, TermFactory termFactory, IQConverter iqConverter,
AtomFactory atomFactory, UnionFlattener unionFlattener, PushDownBooleanExpressionOptimizer pushDownExpressionOptimizer,
IntermediateQueryFactory iqFactory, OptimizerFactory optimizerFactory, PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer, ImmutabilityTools immutabilityTools) {
<<<<<<<
pullOutVariableOptimizer, typeExtractor, relation2Predicate, datalogNormalizer, datalogFactory,
typeFactory, termFactory, iqConverter, iqFactory, atomFactory, unionFlattener, immutabilityTools,
pushDownExpressionOptimizer, pullUpExpressionOptimizer);
=======
typeExtractor, relation2Predicate, datalogNormalizer, datalogFactory,
typeFactory, termFactory, iqFactory, iqConverter, unionFlattener, pushDownExpressionOptimizer,
optimizerFactory, pullUpExpressionOptimizer);
>>>>>>>
typeExtractor, relation2Predicate, datalogNormalizer, datalogFactory,
typeFactory, termFactory, iqConverter, atomFactory, unionFlattener, pushDownExpressionOptimizer, iqFactory, optimizerFactory, pullUpExpressionOptimizer, immutabilityTools);
<<<<<<<
public IQ generateSourceQuery(IQ initialIQ, ExecutorRegistry executorRegistry)
=======
SQLExecutableQuery generateSourceQuery(IntermediateQuery intermediateQuery, ImmutableList<String> signature)
>>>>>>>
public IQ generateSourceQuery(IQ initialIQ, ExecutorRegistry executorRegistry)
<<<<<<<
private IQTree normalizeSubTree(IQTree subTree, VariableGenerator variableGenerator, ExecutorRegistry executorRegistry) {
=======
// Trick for pushing down expressions under unions:
// - there the context may be concrete enough for evaluating certain expressions
// - useful for dealing with SPARQL EBVs for instance
IntermediateQuery pushedDownQuery = pushDownExpressionOptimizer.optimize(intermediateQuery);
log.debug("New query after pushing down the boolean expressions (temporary): \n" + pushedDownQuery);
>>>>>>>
private IQTree normalizeSubTree(IQTree subTree, VariableGenerator variableGenerator, ExecutorRegistry executorRegistry) {
<<<<<<<
subQueryDefinitionsBuilder.build(), false);
=======
subQueryDefinitionsBuilder.build(), termTypeMap, false, viewCounter);
>>>>>>>
subQueryDefinitionsBuilder.build(), false, viewCounter);
<<<<<<<
subQueryDefinitionsBuilder.build(), isDistinct && !distinctResultSet);
=======
subQueryDefinitionsBuilder.build(), termTypeMap,
isDistinct && !distinctResultSet, viewCounter);
>>>>>>>
subQueryDefinitionsBuilder.build(), isDistinct && !distinctResultSet,
viewCounter);
<<<<<<<
boolean unionNoDuplicates) {
=======
ImmutableMap<CQIE, ImmutableList<Optional<TermType>>> termTypeMap,
boolean unionNoDuplicates, AtomicInteger viewCounter) {
>>>>>>>
boolean unionNoDuplicates, AtomicInteger viewCounter) {
<<<<<<<
private final String name;
private final String columnAlias;
=======
private final ImmutableList<String> columnAliases;
>>>>>>>
private final String columnAlias;
<<<<<<<
SignatureVariable(String name, String columnAlias, TermType castType) {
this.name = name;
this.columnAlias = columnAlias;
=======
SignatureVariable(String name, ImmutableList<String> columnAliases, TermType castType) {
this.columnAliases = columnAliases;
>>>>>>>
SignatureVariable(String columnAlias, TermType castType) {
this.columnAlias = columnAlias;
<<<<<<<
throw new RuntimeException("SPARQL_LANG is not supported by the SQL generator");
=======
Term subTerm = function.getTerm(0);
if (subTerm instanceof Variable) {
Variable var = (Variable) subTerm;
Optional<QualifiedAttributeID> lang = index.getLangColumn(var);
if (!lang.isPresent())
throw new RuntimeException("Cannot find LANG column for " + var);
return lang.get().getSQLRendering();
}
else {
// Temporary fix
LanguageTag langTag = Optional.of(subTerm)
.filter(t -> t instanceof Function)
.map(t -> ((Function) t).getFunctionSymbol())
.filter(f -> f instanceof DatatypePredicate)
.map(f -> ((DatatypePredicate) f).getReturnedType())
.flatMap(RDFDatatype::getLanguageTag)
.orElseThrow(() -> new RuntimeException("Cannot extract the language tag from "
+ subTerm));
return sqladapter.getSQLLexicalFormString(langTag.getFullString());
}
>>>>>>>
throw new RuntimeException("SPARQL_LANG is not supported by the SQL generator");
<<<<<<<
=======
Optional<QualifiedAttributeID> getTypeColumn(Variable var) {
return getNonMainColumn(var, -2);
}
Optional<QualifiedAttributeID> getLangColumn(Variable var) {
return getNonMainColumn(var, -1);
}
private Optional<QualifiedAttributeID> getNonMainColumn(Variable var, int relativeIndexWrtMainColumn) {
// For each column reference corresponding to the variable.
// For instance, columnRef is `Qans4View`.`v1` .
for (QualifiedAttributeID mainColumn : getColumns(var)) {
// If the var is defined in a ViewDefinition, then there is a
// column for the type and we just need to refer to that column.
//
// For instance, tableColumnType becomes `Qans4View`.`v1QuestType` .
FromItem subQuery = subQueryFromItems.get(mainColumn.getRelation());
if (subQuery != null) {
int mainColumnIndex = subQuery.attributes.indexOf(mainColumn);
return Optional.of(subQuery.attributes.get(
mainColumnIndex + relativeIndexWrtMainColumn));
}
}
return Optional.empty();
}
>>>>>>> |
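
The `=======` side of the last two hunks encodes the legacy semantic-index view layout that the resolution discards: each projected variable exposes its value column plus a type column at offset -2 and a language column at offset -1, which is exactly what `getNonMainColumn` walks. The resolution keeps a single alias per variable (see `SignatureVariable`) and rejects SPARQL LANG outright. A self-contained sketch of the discarded offset scheme, with a plain attribute list standing in for `FromItem`:

// Legacy layout dropped above: a variable v1 occupies three consecutive
// attributes [v1QuestType, v1Lang, v1], so type/lang sit at fixed
// negative offsets from the main column. Names are illustrative.
import java.util.List;
import java.util.Optional;

class LegacyColumnLayout {
    static Optional<String> nonMainColumn(List<String> attributes,
                                          String mainColumn,
                                          int relativeIndexWrtMainColumn) {
        int mainIndex = attributes.indexOf(mainColumn);
        if (mainIndex < 0) {
            return Optional.empty();
        }
        return Optional.of(attributes.get(mainIndex + relativeIndexWrtMainColumn));
    }

    public static void main(String[] args) {
        List<String> attrs = List.of("v1QuestType", "v1Lang", "v1");
        System.out.println(nonMainColumn(attrs, "v1", -2)); // Optional[v1QuestType]
        System.out.println(nonMainColumn(attrs, "v1", -1)); // Optional[v1Lang]
    }
}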
<<<<<<<
public static final String SPARQL_LIKE_URI = "like";
=======
public static final String SPARQL_COUNT_URI = "Count";
public static final String SPARQL_AVG_URI = "Avg";
public static final String SPARQL_SUM_URI = "Sum";
>>>>>>>
public static final String SPARQL_LIKE_URI = "like";
public static final String SPARQL_COUNT_URI = "Count";
public static final String SPARQL_AVG_URI = "Avg";
public static final String SPARQL_SUM_URI = "Sum";
<<<<<<<
public static final Predicate SPARQL_LIKE = new BooleanOperationPredicateImpl(
SPARQL_LIKE_URI, 2);
=======
public static final Predicate SPARQL_COUNT = new NonBooleanOperationPredicateImpl(
SPARQL_COUNT_URI);
public static final Predicate SPARQL_AVG = new NonBooleanOperationPredicateImpl(
SPARQL_AVG_URI);
public static final Predicate SPARQL_SUM = new NonBooleanOperationPredicateImpl(
SPARQL_SUM_URI);
>>>>>>>
public static final Predicate SPARQL_LIKE = new BooleanOperationPredicateImpl(
SPARQL_LIKE_URI, 2);
public static final Predicate SPARQL_COUNT = new NonBooleanOperationPredicateImpl(
SPARQL_COUNT_URI);
public static final Predicate SPARQL_AVG = new NonBooleanOperationPredicateImpl(
SPARQL_AVG_URI);
public static final Predicate SPARQL_SUM = new NonBooleanOperationPredicateImpl(
SPARQL_SUM_URI); |
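
The resolution above unions both sides: the boolean LIKE predicate (arity 2) survives alongside the newly added aggregate predicates Count, Avg, and Sum, which are non-boolean. A minimal sketch of the two predicate flavors, with simplified classes standing in for ontop's `BooleanOperationPredicateImpl` and `NonBooleanOperationPredicateImpl`:

// Simplified stand-ins: boolean predicates carry an explicit arity,
// the aggregate predicates here do not. Mirrors the resolved constants.
class PredicateSketch {
    static class BooleanOperationPredicate {
        final String name; final int arity;
        BooleanOperationPredicate(String name, int arity) { this.name = name; this.arity = arity; }
    }
    static class NonBooleanOperationPredicate {
        final String name;
        NonBooleanOperationPredicate(String name) { this.name = name; }
    }

    static final BooleanOperationPredicate SPARQL_LIKE =
            new BooleanOperationPredicate("like", 2);
    static final NonBooleanOperationPredicate SPARQL_COUNT =
            new NonBooleanOperationPredicate("Count");
    static final NonBooleanOperationPredicate SPARQL_AVG =
            new NonBooleanOperationPredicate("Avg");
    static final NonBooleanOperationPredicate SPARQL_SUM =
            new NonBooleanOperationPredicate("Sum");
}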