conflict_resolution
<<<<<<<
protected void validateNode() throws InvalidIntermediateQueryException {
getRootNode().validateNode(getChildren());
}
@Override
public IQTree acceptTransformer(IQTransformer transformer) {
=======
public IQTree acceptTransformer(IQTreeVisitingTransformer transformer) {
>>>>>>>
protected void validateNode() throws InvalidIntermediateQueryException {
getRootNode().validateNode(getChildren());
}
@Override
public IQTree acceptTransformer(IQTreeVisitingTransformer transformer) { |
<<<<<<<
int length = QueryFactory.createSPARQLs_three_concepts_opt(Settings.dbType).size();
runQueries(conn, Lists.newArrayList(QueryFactory.createSPARQLs_three_concepts_opt(Settings.dbType).get(length-1)));
=======
// int length = QueryFactory.createSPARQLs_three_concepts(Settings.dbType).size();
// runQueries(conn, Lists.newArrayList(QueryFactory.createSPARQLs_three_concepts(Settings.dbType).get(length-1)));
// runQueries(conn, Lists.newArrayList(QueryFactory.createSPARQLs_three_concepts(Settings.dbType).get(length-1)));
>>>>>>>
// int length = QueryFactory.createSPARQLs_three_concepts(Settings.dbType).size();
// runQueries(conn, Lists.newArrayList(QueryFactory.createSPARQLs_three_concepts(Settings.dbType).get(length-1)));
// runQueries(conn, Lists.newArrayList(QueryFactory.createSPARQLs_three_concepts(Settings.dbType).get(length-1)));
<<<<<<<
List<Long> resultsOne = runQueries(conn, QueryFactory.createSPARQLs_one_concepts_opt(Settings.dbType));
=======
List<Long> resultsOne = runQueries(//conn,
QueryFactory.createSPARQLs_one_concepts(Settings.dbType));
>>>>>>>
List<Long> resultsOne = runQueries(//conn,
QueryFactory.createSPARQLs_one_concepts_opt(Settings.dbType));
<<<<<<<
List<Long> resultsTwo = runQueries(conn, QueryFactory.createSPARQLs_two_concepts_opt(Settings.dbType));
=======
List<Long> resultsTwo = runQueries(//conn,
QueryFactory.createSPARQLs_two_concepts(Settings.dbType));
>>>>>>>
List<Long> resultsTwo = runQueries(//conn,
QueryFactory.createSPARQLs_two_concepts_opt(Settings.dbType));
<<<<<<<
List<Long> resultsThree = runQueries(conn, QueryFactory.createSPARQLs_three_concepts_opt(Settings.dbType));
=======
List<Long> resultsThree = runQueries(//conn,
QueryFactory.createSPARQLs_three_concepts(Settings.dbType));
>>>>>>>
List<Long> resultsThree = runQueries(//conn,
QueryFactory.createSPARQLs_three_concepts_opt(Settings.dbType)); |
<<<<<<<
=======
import com.google.common.base.Joiner;
>>>>>>>
import com.google.common.base.Joiner;
<<<<<<<
import org.semanticweb.ontop.ontology.Assertion;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.UriTemplateMatcher;
=======
import org.semanticweb.ontop.model.impl.OBDADataFactoryImpl;
import org.semanticweb.ontop.model.impl.OBDAVocabulary;
import org.semanticweb.ontop.ontology.ClassAssertion;
import org.semanticweb.ontop.ontology.DataPropertyAssertion;
import org.semanticweb.ontop.ontology.ObjectPropertyAssertion;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.*;
>>>>>>>
import org.semanticweb.ontop.model.impl.OBDADataFactoryImpl;
import org.semanticweb.ontop.model.impl.OBDAVocabulary;
import org.semanticweb.ontop.ontology.ClassAssertion;
import org.semanticweb.ontop.ontology.DataPropertyAssertion;
import org.semanticweb.ontop.ontology.ObjectPropertyAssertion;
import org.semanticweb.ontop.owlrefplatform.core.basicoperations.*; |
<<<<<<<
public ImmutableTerm visitBlank(BlankContext ctx) {
if (ctx.BLANK_NODE_FUNCTION() != null){
=======
public Term visitBlank(BlankContext ctx) {
if (ctx.BLANK_NODE_FUNCTION() != null) {
>>>>>>>
public ImmutableTerm visitBlank(BlankContext ctx) {
if (ctx.BLANK_NODE_FUNCTION() != null){
<<<<<<<
public ImmutableTerm visitTypedLiteral(TypedLiteralContext ctx) {
ImmutableTerm stringValue = visitStringLiteral(ctx.stringLiteral());
=======
public Term visitTypedLiteral(TypedLiteralContext ctx) {
Term stringValue = visitLitString(ctx.litString());
>>>>>>>
public ImmutableTerm visitTypedLiteral(TypedLiteralContext ctx) {
ImmutableTerm stringValue = visitLitString(ctx.litString()); |
<<<<<<<
public void setupInVirtualMode(Collection<OBDAMappingAxiom> mappings, Connection localConnection, VocabularyValidator vocabularyValidator, TBoxReasoner reformulationReasoner, Ontology inputOntology, TMappingExclusionConfig excludeFromTMappings)
throws SQLException, JSQLParserException, OBDAException {
=======
public void setupInVirtualMode(Collection<OBDAMappingAxiom> mappings, Connection localConnection, VocabularyValidator vocabularyValidator, TBoxReasoner reformulationReasoner, Ontology inputOntology, TMappingExclusionConfig excludeFromTMappings, boolean queryingAnnotationsInOntology, boolean sameAs)
throws SQLException, JSQLParserException, OBDAException {
>>>>>>>
public void setupInVirtualMode(Collection<OBDAMappingAxiom> mappings, Connection localConnection, VocabularyValidator vocabularyValidator, TBoxReasoner reformulationReasoner, Ontology inputOntology, TMappingExclusionConfig excludeFromTMappings, boolean queryingAnnotationsInOntology, boolean sameAs)
throws SQLException, JSQLParserException, OBDAException {
<<<<<<<
/**
=======
/**
* add sameAsInverse
*/
mappings.addAll(MappingSameAs.addSameAsInverse(mappings));
/**
>>>>>>>
/**
* add sameAsInverse
*/
mappings.addAll(MappingSameAs.addSameAsInverse(mappings));
/**
<<<<<<<
// Adding ontology assertions (ABox) as rules (facts, head with no body).
addAssertionsAsFacts(unfoldingProgram, inputOntology.getClassAssertions(),
inputOntology.getObjectPropertyAssertions(), inputOntology.getDataPropertyAssertions());
=======
>>>>>>>
<<<<<<<
=======
* @param reformulationReasoner
* @param mappings
* @throws OBDAException
>>>>>>>
* @param reformulationReasoner
* @param mappings
* @throws OBDAException
<<<<<<<
private void addAssertionsAsFacts(List<CQIE> unfoldingProgram, Iterable<ClassAssertion> cas,
Iterable<ObjectPropertyAssertion> pas, Iterable<DataPropertyAssertion> das) {
=======
private void addAssertionsAsFacts(List<CQIE> unfoldingProgram, Iterable<ClassAssertion> cas,
Iterable<ObjectPropertyAssertion> pas, Iterable<DataPropertyAssertion> das, List<AnnotationAssertion> aas) {
>>>>>>>
private void addAssertionsAsFacts(List<CQIE> unfoldingProgram, Iterable<ClassAssertion> cas,
Iterable<ObjectPropertyAssertion> pas, Iterable<DataPropertyAssertion> das, List<AnnotationAssertion> aas) {
<<<<<<<
public DatalogProgram unfold(DatalogProgram programAfterRewriting) {
return unfolder.unfold(programAfterRewriting, "ans1",QuestConstants.BUP, true);
}
public MetadataForQueryOptimization getMetadataForQueryOptimization() {
return metadataForQueryOptimization;
}
public ImmutableList<Predicate> getExtensionalPredicates() {
return unfolder.getExtensionalPredicates();
}
=======
/**
* Store information about owl:sameAs
*/
public void addSameAsMapping(List<CQIE> unfoldingProgram) throws OBDAException{
MappingSameAs msa = new MappingSameAs(unfoldingProgram);
dataPropertiesAndClassesMapped = msa.getDataPropertiesAndClassesWithSameAs();
objectPropertiesMapped = msa.getObjectPropertiesWithSameAs();
}
public Set<Predicate> getSameAsDataPredicatesAndClasses(){
return dataPropertiesAndClassesMapped;
}
public Set<Predicate> getSameAsObjectPredicates(){
return objectPropertiesMapped;
}
>>>>>>>
/**
* Store information about owl:sameAs
*/
public void addSameAsMapping(List<CQIE> unfoldingProgram) throws OBDAException{
MappingSameAs msa = new MappingSameAs(unfoldingProgram);
dataPropertiesAndClassesMapped = msa.getDataPropertiesAndClassesWithSameAs();
objectPropertiesMapped = msa.getObjectPropertiesWithSameAs();
}
public Set<Predicate> getSameAsDataPredicatesAndClasses(){
return dataPropertiesAndClassesMapped;
}
public Set<Predicate> getSameAsObjectPredicates(){
return objectPropertiesMapped;
}
public DatalogProgram unfold(DatalogProgram programAfterRewriting) {
return unfolder.unfold(programAfterRewriting, "ans1",QuestConstants.BUP, true);
}
public MetadataForQueryOptimization getMetadataForQueryOptimization() {
return metadataForQueryOptimization;
}
public ImmutableList<Predicate> getExtensionalPredicates() {
return unfolder.getExtensionalPredicates();
} |
<<<<<<<
import it.unibz.inf.ontop.model.vocabulary.SPARQL;
import it.unibz.inf.ontop.model.vocabulary.XPathFunction;
import it.unibz.inf.ontop.model.vocabulary.XSD;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
=======
>>>>>>>
import it.unibz.inf.ontop.model.vocabulary.SPARQL;
import it.unibz.inf.ontop.model.vocabulary.XPathFunction;
import it.unibz.inf.ontop.model.vocabulary.XSD;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
<<<<<<<
return new TranslationResult(getAtomsExtended(eqAtoms.stream()), ImmutableSet.copyOf(vars), true);
=======
return new TranslationResult(getAtomsExtended(eqAtoms.stream()), new LinkedHashSet<>(vars), false);
>>>>>>>
return new TranslationResult(getAtomsExtended(eqAtoms.stream()), new LinkedHashSet<>(vars), true);
<<<<<<<
private ImmutableExpression getFilterExpression(ValueExpr expr, ImmutableSet<Variable> variables)
=======
private Function getFilterExpression(ValueExpr expr, LinkedHashSet<Variable> variables)
>>>>>>>
private ImmutableExpression getFilterExpression(ValueExpr expr, LinkedHashSet<Variable> variables) |
<<<<<<<
import it.unibz.inf.ontop.exception.OntopUnsupportedInputQueryException;
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
=======
>>>>>>>
import it.unibz.inf.ontop.injection.IntermediateQueryFactory;
<<<<<<<
this.atomFactory = atomFactory;
this.termFactory = termFactory;
this.iqFactory = iqFactory;
ClassifiedTBox saturatedTBox = obdaSpecification.getSaturatedTBox();
this.sigma = inclusionDependencyTools.getABoxDependencies(saturatedTBox, true);
=======
>>>>>>>
this.atomFactory = atomFactory;
this.termFactory = termFactory;
this.iqFactory = iqFactory; |
<<<<<<<
import it.unibz.inf.ontop.iq.IQTree;
import it.unibz.inf.ontop.iq.transform.NoNullValueEnforcer;
import it.unibz.inf.ontop.model.type.TypeFactory;
=======
import it.unibz.inf.ontop.model.term.TermFactory;
>>>>>>>
import it.unibz.inf.ontop.iq.IQTree;
import it.unibz.inf.ontop.iq.transform.NoNullValueEnforcer;
<<<<<<<
private final NoNullValueEnforcer noNullValueEnforcer;
private final IntermediateQueryFactory iqFactory;
=======
private final JDBCMetadataProviderFactory metadataProviderFactory;
private final MetaMappingExpander expander;
>>>>>>>
private final NoNullValueEnforcer noNullValueEnforcer;
private final IntermediateQueryFactory iqFactory;
private final JDBCMetadataProviderFactory metadataProviderFactory;
<<<<<<<
this.noNullValueEnforcer = noNullValueEnforcer;
this.iqFactory = iqFactory;
this.metamappingExpander = metamappingExpander;
=======
this.metadataProviderFactory = metadataProviderFactory;
this.expander = new MetaMappingExpander(termFactory, substitutionFactory, rdfFactory, sourceQueryFactory);
>>>>>>>
this.noNullValueEnforcer = noNullValueEnforcer;
this.iqFactory = iqFactory;
this.metamappingExpander = metamappingExpander;
this.metadataProviderFactory = metadataProviderFactory; |
<<<<<<<
import java.awt.Color;
import java.awt.Component;
import java.awt.event.KeyEvent;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.DefaultComboBoxModel;
import javax.swing.DefaultListCellRenderer;
import javax.swing.ImageIcon;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.UIManager;
import javax.swing.plaf.metal.MetalComboBoxButton;
import javax.swing.table.TableModel;
<<<<<<< HEAD:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/panels/MappingAssistantPanel.java
import it.unibz.inf.ontop.protege.core.OBDAModelWrapper;
import it.unibz.inf.ontop.protege.gui.IconLoader;
import it.unibz.inf.ontop.protege.gui.MapItem;
import it.unibz.inf.ontop.protege.gui.PredicateItem;
import it.unibz.inf.ontop.protege.gui.SQLResultSetTableModel;
import it.unibz.inf.ontop.protege.gui.component.AutoSuggestComboBox;
import it.unibz.inf.ontop.protege.gui.component.PropertyMappingPanel;
import it.unibz.inf.ontop.protege.gui.component.SQLResultTable;
import it.unibz.inf.ontop.protege.gui.treemodels.IncrementalResultSetTableModel;
import it.unibz.inf.ontop.protege.utils.DatasourceSelectorListener;
import it.unibz.inf.ontop.protege.utils.DialogUtils;
import it.unibz.inf.ontop.protege.utils.OBDAProgessMonitor;
import it.unibz.inf.ontop.protege.utils.OBDAProgressListener;
import it.unibz.inf.ontop.exception.DuplicateMappingException;
import it.unibz.inf.ontop.injection.NativeQueryLanguageComponentFactory;
import it.unibz.inf.ontop.io.PrefixManager;
import it.unibz.inf.ontop.model.*;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.model.impl.RDBMSourceParameterConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.ontology.OClass;
import it.unibz.inf.ontop.owlrefplatform.core.queryevaluation.SQLAdapterFactory;
import it.unibz.inf.ontop.owlrefplatform.core.queryevaluation.SQLDialectAdapter;
import it.unibz.inf.ontop.owlrefplatform.core.queryevaluation.SQLServerSQLDialectAdapter;
import it.unibz.inf.ontop.sql.DBMetadata;
import it.unibz.inf.ontop.sql.DataDefinition;
import it.unibz.inf.ontop.sql.JDBCConnectionManager;
import it.unibz.inf.ontop.sql.TableDefinition;
import it.unibz.inf.ontop.sql.ViewDefinition;
=======
=======
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.sql.DBMetadata;
import it.unibz.inf.ontop.sql.DataDefinition;
import it.unibz.inf.ontop.sql.TableDefinition;
import it.unibz.inf.ontop.sql.ViewDefinition;
import it.unibz.inf.ontop.io.PrefixManager;
import it.unibz.inf.ontop.model.CQIE;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDADataSource;
import it.unibz.inf.ontop.model.OBDALibConstants;
import it.unibz.inf.ontop.model.OBDAMappingAxiom;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.OBDAQuery;
import it.unibz.inf.ontop.model.Predicate;
import it.unibz.inf.ontop.model.Term;
import it.unibz.inf.ontop.model.ValueConstant;
import it.unibz.inf.ontop.model.Variable;
import it.unibz.inf.ontop.protege.gui.SQLResultSetTableModel;
import it.unibz.inf.ontop.protege.gui.component.SQLResultTable;
import it.unibz.inf.ontop.protege.utils.OBDAProgessMonitor;
import it.unibz.inf.ontop.sql.JDBCConnectionManager;
>>>>>>> v3/package-names-changed:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/panels/MappingAssistantPanel.java
import it.unibz.inf.ontop.sql.api.Attribute;
=======
import it.unibz.inf.ontop.protege.utils.OBDAProgressMonitor;
import it.unibz.inf.ontop.sql.*;
import javax.swing.*;
import javax.swing.plaf.metal.MetalComboBoxButton;
import javax.swing.table.TableModel;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.KeyEvent;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
>>>>>>>
import it.unibz.inf.ontop.protege.utils.OBDAProgressMonitor;
import it.unibz.inf.ontop.sql.*;
import javax.swing.*;
import javax.swing.plaf.metal.MetalComboBoxButton;
import javax.swing.table.TableModel;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.KeyEvent;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Vector;
import java.util.concurrent.CountDownLatch;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
<<<<<<<
private OBDAModelWrapper obdaModel;
=======
private final OBDAModel obdaModel;
>>>>>>>
private final OBDAModelWrapper obdaModel;
<<<<<<<
predicateObjectMapsList = pnlPropertyEditorList.getPredicateObjectMapsList();
CQIE target = prepareTargetQuery(predicateSubjectMap, predicateObjectMapsList);
=======
List<MapItem> predicateObjectMapsList = pnlPropertyEditorList.getPredicateObjectMapsList();
List<Function> target = prepareTargetQuery(predicateSubjectMap, predicateObjectMapsList);
if (target.isEmpty()) {
JOptionPane.showMessageDialog(this, "ERROR: The target cannot be empty. Add a class or a property", "Error", JOptionPane.ERROR_MESSAGE);
return;
}
>>>>>>>
List<MapItem> predicateObjectMapsList = pnlPropertyEditorList.getPredicateObjectMapsList();
List<Function> target = prepareTargetQuery(predicateSubjectMap, predicateObjectMapsList);
if (target.isEmpty()) {
JOptionPane.showMessageDialog(this, "ERROR: The target cannot be empty. Add a class or a property", "Error", JOptionPane.ERROR_MESSAGE);
return;
}
<<<<<<<
OBDAMappingAxiom mappingAxiom = nativeQLFactory.create(dfac.getSQLQuery(source), target);
obdaModel.addMapping(selectedSource.getSourceID(), mappingAxiom);
=======
OBDAMappingAxiom mappingAxiom = dfac.getRDBMSMappingAxiom(dfac.getSQLQuery(source), target);
obdaModel.addMapping(selectedSource.getSourceID(), mappingAxiom, false);
>>>>>>>
OBDAMappingAxiom mappingAxiom = nativeQLFactory.create(dfac.getSQLQuery(source), target);
obdaModel.addMapping(selectedSource.getSourceID(), mappingAxiom, false);
<<<<<<<
//TODO: find a way to get the current preferences. Necessary if an third-party adapter should be used.
SQLDialectAdapter sqlDialect = SQLAdapterFactory.getSQLDialectAdapter(dbType, new QuestPreferences());
=======
//second parameter is database version, not relevant in this step
SQLDialectAdapter sqlDialect = SQLAdapterFactory.getSQLDialectAdapter(dbType, "");
>>>>>>>
//TODO: find a way to get the current preferences. Necessary if an third-party adapter should be used.
SQLDialectAdapter sqlDialect = SQLAdapterFactory.getSQLDialectAdapter(dbType, new QuestPreferences()); |
<<<<<<<
<<<<<<< HEAD:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/panels/OBDAPreferencesPanel.java
import it.unibz.inf.ontop.protege.core.ProtegeOBDAPreferences;
=======
import it.unibz.inf.ontop.utils.OBDAPreferences;
>>>>>>> v3/package-names-changed:ontop-protege/src/main/java/it/unibz/inf/ontop/protege/panels/OBDAPreferencesPanel.java
=======
>>>>>>>
<<<<<<<
jCheckBoxUseDefault.setSelected(new Boolean(pref.get(ProtegeOBDAPreferences.USE_DEAFAULT).toString()));
=======
jCheckBoxUseDefault.setSelected(Boolean.parseBoolean(pref.get(OBDAPreferences.USE_DEAFAULT).toString()));
>>>>>>>
jCheckBoxUseDefault.setSelected(new Boolean(pref.get(DisposableOBDAPreferences.USE_DEAFAULT).toString())); |
<<<<<<<
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAException;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
<<<<<<<
import it.unibz.inf.ontop.exception.DuplicateMappingException;
import it.unibz.inf.ontop.exception.InvalidMappingException;
import it.unibz.inf.ontop.io.InvalidDataSourceException;
import it.unibz.inf.ontop.model.OBDAException;
import it.unibz.inf.ontop.owlrefplatform.core.QuestConstants;
import it.unibz.inf.ontop.owlrefplatform.core.QuestPreferences;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLConnection;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLFactory;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLStatement;
=======
>>>>>>>
import it.unibz.inf.ontop.model.OBDAException;
<<<<<<<
import java.io.IOException;
import java.util.Properties;
=======
>>>>>>>
<<<<<<<
private void runQuery(String varName) throws OBDAException, OWLException, DuplicateMappingException,
InvalidMappingException, InvalidDataSourceException, IOException {
=======
private void runQuery(String varName) throws OBDAException, OWLException {
>>>>>>>
private void runQuery(String varName) throws OBDAException, OWLException{
<<<<<<<
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile1), new QuestPreferences(p));
reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
=======
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
reasoner = factory.createReasoner(ontology, config);
>>>>>>>
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.nativeOntopMappingFile(new File(obdafile1))
.preferences(p)
.build();
reasoner = factory.createReasoner(ontology, config); |
<<<<<<<
public boolean composeTerms(Term term1, Term term2) {
throw new UnsupportedOperationException();
}
@Override
public boolean composeFunctions(Function term1, Function term2) {
throw new UnsupportedOperationException();
}
@Override
public boolean compose(Substitution substitution) {
throw new UnsupportedOperationException();
}
@Override
public Term get(Variable var) {
=======
public Term get(VariableImpl var) {
>>>>>>>
public Term get(Variable var) {
<<<<<<<
public Map<Variable, Term> getMap() {
=======
public ImmutableMap<VariableImpl, Term> getMap() {
>>>>>>>
public ImmutableMap<Variable, Term> getMap() { |
<<<<<<<
import com.google.inject.Inject;
import it.unibz.inf.ontop.exception.MappingIOException;
import it.unibz.inf.ontop.injection.MappingFactory;
import it.unibz.inf.ontop.mapping.MappingMetadata;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.UriTemplateMatcher;
=======
import com.google.common.collect.ImmutableSet;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import it.unibz.inf.ontop.ontology.impl.OntologyVocabularyImpl;
import org.apache.commons.rdf.rdf4j.RDF4J;
>>>>>>>
import com.google.inject.Inject;
import it.unibz.inf.ontop.exception.MappingIOException;
import it.unibz.inf.ontop.injection.MappingFactory;
import it.unibz.inf.ontop.mapping.MappingMetadata;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.UriTemplateMatcher;
import com.google.common.collect.ImmutableSet;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import it.unibz.inf.ontop.ontology.impl.OntologyVocabularyImpl;
import org.apache.commons.rdf.rdf4j.RDF4J;
<<<<<<<
public OBDAModel parse(Model mappingGraph) throws InvalidMappingException, DuplicateMappingException {
R2RMLManager r2rmlManager = new R2RMLManager(mappingGraph, nativeQLFactory);
return parse(r2rmlManager);
}
=======
public OBDAModel getOBDAModel() throws InvalidMappingException, IOException, InvalidDataSourceException,
DuplicateMappingException {
/**
* The OBDA model is only computed once.
*/
if (obdaModel != null) {
return obdaModel;
}
R2RMLManager r2rmlManager;
if (mappingFile != null)
try {
r2rmlManager = new R2RMLManager(mappingFile, nativeQLFactory);
} catch (RDFParseException | RDFHandlerException e) {
throw new InvalidDataSourceException(e.getMessage());
}
else if (mappingGraph != null){
r2rmlManager = new R2RMLManager(new RDF4J().asGraph(mappingGraph), nativeQLFactory);
}
else
throw new RuntimeException("Internal inconsistency. A mappingFile or a mappingGraph should be defined.");
OBDADataSource dataSource = this.predefinedDataSource;
/**
* If the data source is not already defined, extracts it from the preferences.
*/
if (dataSource == null) {
OBDADataSourceFromConfigExtractor dataSourceExtractor = new OBDADataSourceFromConfigExtractor(configuration);
dataSource = dataSourceExtractor.getDataSource() ;
}
>>>>>>>
public OBDAModel parse(Model mappingGraph) throws InvalidMappingException, DuplicateMappingException {
R2RMLManager r2rmlManager = new R2RMLManager(new RDF4J().asGraph(mappingGraph), nativeQLFactory);
return parse(r2rmlManager);
} |
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.injection.QuestCorePreferences;
import it.unibz.inf.ontop.rdf4j.repository.OntopVirtualRepository;
=======
import it.unibz.inf.ontop.injection.QuestCoreSettings;
import it.unibz.inf.ontop.sesame.SesameVirtualRepo;
>>>>>>>
import it.unibz.inf.ontop.injection.QuestCoreSettings;
import it.unibz.inf.ontop.rdf4j.repository.OntopVirtualRepository; |
<<<<<<<
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
>>>>>>>
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.owlapi3.*;
import junit.framework.TestCase;
=======
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
<<<<<<<
=======
import org.semanticweb.owlapi.reasoner.ReasonerInternalException;
>>>>>>>
import org.semanticweb.owlapi.reasoner.ReasonerInternalException;
<<<<<<<
=======
// Loading the OBDA data
obdaModel = fac.getOBDAModel();
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load(obdafile);
>>>>>>> |
<<<<<<<
private DummyRDBMetadata(AtomFactory atomFactory, TermFactory termFactory, TypeFactory typeFactory,
DatalogFactory datalogFactory) {
=======
private DummyRDBMetadata(TypeFactory typeFactory, JdbcTypeMapper jdbcTypeMapper) {
>>>>>>>
private DummyRDBMetadata(TypeFactory typeFactory) {
<<<<<<<
new QuotedIDFactoryStandardSQL("\""), atomFactory,
termFactory, typeFactory, datalogFactory);
=======
new QuotedIDFactoryStandardSQL("\""), jdbcTypeMapper,
typeFactory);
>>>>>>>
new QuotedIDFactoryStandardSQL("\""), typeFactory); |
<<<<<<<
import it.unibz.inf.ontop.model.atom.AtomFactory;
import it.unibz.inf.ontop.model.atom.DistinctVariableOnlyDataAtom;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.Variable;
import it.unibz.inf.ontop.model.term.functionsymbol.Predicate;
=======
>>>>>>>
import it.unibz.inf.ontop.model.atom.AtomFactory;
import it.unibz.inf.ontop.model.atom.DistinctVariableOnlyDataAtom;
import it.unibz.inf.ontop.model.term.TermFactory;
import it.unibz.inf.ontop.model.term.Variable;
<<<<<<<
import it.unibz.inf.ontop.substitution.impl.SubstitutionUtilities;
import it.unibz.inf.ontop.substitution.impl.UnifierUtilities;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
=======
>>>>>>>
import it.unibz.inf.ontop.utils.ImmutableCollectors;
<<<<<<<
private final UnifierUtilities unifierUtilities;
private final SubstitutionUtilities substitutionUtilities;
=======
>>>>>>>
<<<<<<<
private final AtomFactory atomFactory;
private final TermFactory termFactory;
private final IntermediateQueryFactory iqFactory;
=======
private final DatalogProgram2QueryConverter datalogConverter;
>>>>>>>
private final DatalogProgram2QueryConverter datalogConverter;
private final TermFactory termFactory;
private final AtomFactory atomFactory;
private final IntermediateQueryFactory iqFactory;
<<<<<<<
@Assisted ExecutorRegistry executorRegistry,
QueryCache queryCache,
BindingLiftOptimizer bindingLiftOptimizer, OntopReformulationSettings settings,
DatalogProgram2QueryConverter datalogConverter,
TranslationFactory translationFactory,
QueryRewriter queryRewriter,
JoinLikeOptimizer joinLikeOptimizer,
InputQueryFactory inputQueryFactory,
DatalogFactory datalogFactory,
DatalogNormalizer datalogNormalizer, FlattenUnionOptimizer flattenUnionOptimizer,
EQNormalizer eqNormalizer, UnifierUtilities unifierUtilities,
SubstitutionUtilities substitutionUtilities,
PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer, IQConverter iqConverter,
AtomFactory atomFactory, TermFactory termFactory, IntermediateQueryFactory iqFactory) {
=======
@Assisted ExecutorRegistry executorRegistry,
QueryCache queryCache,
BindingLiftOptimizer bindingLiftOptimizer, OntopReformulationSettings settings,
TranslationFactory translationFactory,
QueryRewriter queryRewriter,
JoinLikeOptimizer joinLikeOptimizer,
InputQueryFactory inputQueryFactory,
DatalogFactory datalogFactory,
FlattenUnionOptimizer flattenUnionOptimizer,
EQNormalizer eqNormalizer,
PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer,
IQConverter iqConverter, DatalogProgram2QueryConverter datalogConverter) {
>>>>>>>
@Assisted ExecutorRegistry executorRegistry,
QueryCache queryCache,
BindingLiftOptimizer bindingLiftOptimizer, OntopReformulationSettings settings,
TranslationFactory translationFactory,
QueryRewriter queryRewriter,
JoinLikeOptimizer joinLikeOptimizer,
InputQueryFactory inputQueryFactory,
DatalogFactory datalogFactory,
FlattenUnionOptimizer flattenUnionOptimizer,
EQNormalizer eqNormalizer,
PushUpBooleanExpressionOptimizer pullUpExpressionOptimizer,
IQConverter iqConverter, DatalogProgram2QueryConverter datalogConverter,
TermFactory termFactory, AtomFactory atomFactory, IntermediateQueryFactory iqFactory) {
<<<<<<<
this.unifierUtilities = unifierUtilities;
this.substitutionUtilities = substitutionUtilities;
=======
>>>>>>> |
<<<<<<<
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
=======
import java.util.List;
>>>>>>>
import java.util.HashMap;
import java.util.Map;
import java.util.List; |
<<<<<<<
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import it.unibz.krdb.obda.model.AlgebraOperatorPredicate;
import it.unibz.krdb.obda.model.BooleanOperationPredicate;
import it.unibz.krdb.obda.model.CQIE;
import it.unibz.krdb.obda.model.Constant;
import it.unibz.krdb.obda.model.DatalogProgram;
import it.unibz.krdb.obda.model.Function;
import it.unibz.krdb.obda.model.Term;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.Predicate;
import it.unibz.krdb.obda.model.Variable;
=======
import it.unibz.krdb.obda.model.*;
>>>>>>>
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import it.unibz.krdb.obda.model.*;
<<<<<<<
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
=======
>>>>>>>
<<<<<<<
private Multimap<Predicate, List<Integer>> primaryKeys = HashMultimap.create();
=======
private final Map<Predicate, List<Integer>> primaryKeys;
>>>>>>>
private final Multimap<Predicate, List<Integer>> primaryKeys;
<<<<<<<
public DatalogUnfolder(DatalogProgram unfoldingProgram) {
this(unfoldingProgram.getRules(), HashMultimap.<Predicate, List<Integer>>create());
}
=======
>>>>>>>
<<<<<<<
* @param resolutionCount
* The number of resolution attemts done globaly, needed to spawn
* fresh variables.
* @param termidx
=======
* @param atomindx
>>>>>>>
* @param atomindx
<<<<<<<
Collection<List<Integer>> pKeys = primaryKeys.get(newatom.getFunctionSymbol());
for(List<Integer> pKey : pKeys){
if (!(pKey != null && !pKey.isEmpty())) {
// no pkeys for this predicate
continue;
}
/*
* the predicate has a primary key, looking for candidates for
* unification, when we find one we can stop, since the application
* of this optimization at each step of the derivation tree
* guarantees there wont be any other redundant atom.
*/
Function replacement = null;
Unifier mgu1 = null;
for (int idx2 = 0; idx2 < termidx.peek(); idx2++) {
Function tempatom = (Function) innerAtoms.get(idx2);
if (!tempatom.getFunctionSymbol().equals(newatom.getFunctionSymbol())) {
/*
* predicates are different, atoms cant be unified
*/
continue;
}
boolean redundant = true;
for (Integer termidx2 : pKey) {
if (!newatom.getTerm(termidx2 - 1).equals(tempatom.getTerm(termidx2 - 1))) {
redundant = false;
break;
}
}
if (redundant) {
/* found a candidate replacement atom */
mgu1 = Unifier.getMGU(newatom, tempatom);
if (mgu1 != null) {
replacement = tempatom;
break;
}
}
}
if (replacement == null)
continue;
if (mgu1 == null)
throw new RuntimeException("Unexpected case found while performing JOIN elimination. Contact the authors for debugging.");
if (currentAtom.isAlgebraFunction() && currentAtom.getFunctionSymbol().equals(OBDAVocabulary.SPARQL_LEFTJOIN)) {
continue;
}
UnifierUtilities.applyUnifier(partialEvalution, mgu1, false);
innerAtoms.remove(newatomidx);
newatomidx -= 1;
newatomcount -= 1;
break;
}
=======
List<Integer> pkey = primaryKeys.get(newatom.getFunctionSymbol());
if (!(pkey != null && !pkey.isEmpty())) {
// no pkeys for this predicate
continue;
}
/*
* the predicate has a primary key, looking for candidates for
* unification, when we find one we can stop, since the application
* of this optimization at each step of the derivation tree
* guarantees there wont be any other redundant atom.
*/
Function replacement = null;
Substitution mgu1 = null;
for (int idx2 = 0; idx2 < termidx.peek(); idx2++) {
Function tempatom = innerAtoms.get(idx2);
if (!tempatom.getFunctionSymbol().equals(newatom.getFunctionSymbol())) {
/*
* predicates are different, atoms cant be unified
*/
continue;
}
boolean redundant = true;
for (Integer termidx2 : pkey) {
if (!newatom.getTerm(termidx2 - 1).equals(tempatom.getTerm(termidx2 - 1))) {
redundant = false;
break;
}
}
if (redundant) {
/* found a candidate replacement atom */
mgu1 = UnifierUtilities.getMGU(newatom, tempatom);
if (mgu1 != null) {
replacement = tempatom;
break;
}
}
}
if (replacement == null)
continue;
if (mgu1 == null)
throw new RuntimeException("Unexpected case found while performing JOIN elimination. Contact the authors for debugging.");
if (currentAtom.isAlgebraFunction() && (currentAtom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)) {
continue;
}
SubstitutionUtilities.applySubstitution(partialEvalution, mgu1, false);
innerAtoms.remove(newatomidx);
newatomidx -= 1;
newatomcount -= 1;
>>>>>>>
Collection<List<Integer>> pKeys = primaryKeys.get(newatom.getFunctionSymbol());
for(List<Integer> pKey : pKeys){
if (!(pKey != null && !pKey.isEmpty())) {
// no pkeys for this predicate
continue;
}
/*
* the predicate has a primary key, looking for candidates for
* unification, when we find one we can stop, since the application
* of this optimization at each step of the derivation tree
* guarantees there wont be any other redundant atom.
*/
Function replacement = null;
Substitution mgu1 = null;
for (int idx2 = 0; idx2 < termidx.peek(); idx2++) {
Function tempatom = innerAtoms.get(idx2);
if (!tempatom.getFunctionSymbol().equals(newatom.getFunctionSymbol())) {
/*
* predicates are different, atoms cant be unified
*/
continue;
}
boolean redundant = true;
for (Integer termidx2 : pKey) {
if (!newatom.getTerm(termidx2 - 1).equals(tempatom.getTerm(termidx2 - 1))) {
redundant = false;
break;
}
}
if (redundant) {
/* found a candidate replacement atom */
mgu1 = UnifierUtilities.getMGU(newatom, tempatom);
if (mgu1 != null) {
replacement = tempatom;
break;
}
}
}
if (replacement == null)
continue;
if (mgu1 == null)
throw new RuntimeException("Unexpected case found while performing JOIN elimination. Contact the authors for debugging.");
if (currentAtom.isAlgebraFunction() && (currentAtom.getFunctionSymbol() == OBDAVocabulary.SPARQL_LEFTJOIN)) {
continue;
}
SubstitutionUtilities.applySubstitution(partialEvalution, mgu1, false);
innerAtoms.remove(newatomidx);
newatomidx -= 1;
newatomcount -= 1;
break;
} |
<<<<<<<
public Ontology translateAndClassify(Collection<OWLOntology> ontologies) {
log.debug("Load ontologies called. Translating {} ontologies.", ontologies.size());
=======
public static Ontology translateAndClassify(OWLOntology owl) {
>>>>>>>
public Ontology translateAndClassify(OWLOntology owl) { |
<<<<<<<
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS);
=======
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
>>>>>>>
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
<<<<<<<
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS);
=======
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
>>>>>>>
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
<<<<<<<
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS);
=======
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
>>>>>>>
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
<<<<<<<
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS);
=======
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY);
>>>>>>>
UriTemplateMatcher.create(Stream.of(), TERM_FACTORY, TYPE_FACTORY), null, ATOM_FACTORY, TERM_FACTORY,
TYPE_FACTORY, DATALOG_FACTORY, IMMUTABILITY_TOOLS, RDF_FACTORY); |
<<<<<<<
import org.semanticweb.ontop.model.CQIE;
import org.semanticweb.ontop.model.Function;
import org.semanticweb.ontop.model.OBDADataFactory;
import org.semanticweb.ontop.model.Predicate;
import org.semanticweb.ontop.model.Term;
import org.semanticweb.ontop.model.ValueConstant;
import org.semanticweb.ontop.model.Variable;
=======
import org.semanticweb.ontop.model.*;
import org.semanticweb.ontop.model.impl.AnonymousVariable;
>>>>>>>
import org.semanticweb.ontop.model.*; |
<<<<<<<
import it.unibz.inf.ontop.model.term.TermFactory;
=======
import it.unibz.inf.ontop.model.term.ImmutableTerm;
>>>>>>>
import it.unibz.inf.ontop.model.term.ImmutableTerm;
import it.unibz.inf.ontop.model.term.TermFactory; |
<<<<<<<
import it.unibz.krdb.sql.QuotedIDFactory;
import it.unibz.krdb.sql.api.AllComparison;
import it.unibz.krdb.sql.api.AnyComparison;
import it.unibz.krdb.sql.api.ParsedSQLQuery;
=======
import it.unibz.krdb.sql.api.*;
>>>>>>>
import it.unibz.krdb.sql.QuotedIDFactory;
import it.unibz.krdb.sql.api.*;
<<<<<<<
import net.sf.jsqlparser.statement.select.*;
=======
import net.sf.jsqlparser.schema.Table;
import net.sf.jsqlparser.statement.select.*;
>>>>>>>
import net.sf.jsqlparser.statement.select.*;
<<<<<<<
for (WithItem withItem : select.getWithItemsList())
withItem.accept(selectVisitor);
=======
for (WithItem withItem : select.getWithItemsList()) {
withItem.accept(this);
}
}
select.getSelectBody().accept(this);
if(unsupported && deepParsing)
throw new JSQLParserException("Query not yet supported");
return whereClause;
}
public void setWhereClause(Select selectQuery, SelectionJSQL whereClause) {
isSetting = true;
this.whereClause = whereClause;
selectQuery.getSelectBody().accept(this);
}
/*
* visit Plainselect, search for the where structure that returns an Expression
* Stored in SelectionJSQL.
* @see net.sf.jsqlparser.statement.select.SelectVisitor#visit(net.sf.jsqlparser.statement.select.PlainSelect)
*/
@Override
public void visit(PlainSelect plainSelect) {
/*
* First check if we are setting a new whereClause. Add the expression contained in the SelectionJSQL
* in the WHERE clause.
*/
//FROM (subselect) -> process
//plainSelect.getFromItem().accept(this);
if (isSetting) {
plainSelect.setWhere(whereClause.getRawConditions());
} else {
if (plainSelect.getWhere() != null) {
Expression where = plainSelect.getWhere();
whereClause = new SelectionJSQL();
whereClause.addCondition(where);
//we visit the where clause to remove quotes and fix any and all comparison
where.accept(this);
}
}
}
@Override
public void visit(SetOperationList setOpList) {
// we do not consider the case of UNION
setOpList.getPlainSelects().get(0).accept(this);
}
@Override
public void visit(WithItem withItem) {
// we do not consider the case for WITH
}
@Override
public void visit(NullValue nullValue) {
// we do not execute anything
}
@Override
public void visit(Function function) {
if(function.getName().toLowerCase().equals("regexp_like") ) {
for(Expression ex :function.getParameters().getExpressions()){
ex.accept(this);
}
}
else{
unsupported = true;
}
}
@Override
public void visit(JdbcParameter jdbcParameter) {
//we do not execute anything
}
@Override
public void visit(JdbcNamedParameter jdbcNamedParameter) {
// we do not execute anything
}
@Override
public void visit(DoubleValue doubleValue) {
// we do not execute anything
}
@Override
public void visit(LongValue longValue) {
// we do not execute anything
}
@Override
public void visit(DateValue dateValue) {
// we do not execute anything
}
@Override
public void visit(TimeValue timeValue) {
// we do not execute anything
}
@Override
public void visit(TimestampValue timestampValue) {
// we do not execute anything
}
@Override
public void visit(Parenthesis parenthesis) {
parenthesis.getExpression().accept(this);
}
@Override
public void visit(StringValue stringValue) {
// we do not execute anything
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.arithmetic.Addition)
*/
@Override
public void visit(Addition addition) {
visitBinaryExpression(addition);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.arithmetic.Division)
*/
@Override
public void visit(Division division) {
visitBinaryExpression(division);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.arithmetic.Multiplication)
*/
@Override
public void visit(Multiplication multiplication) {
visitBinaryExpression(multiplication);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.arithmetic.Subtraction)
*/
@Override
public void visit(Subtraction subtraction) {
visitBinaryExpression(subtraction);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.conditional.AndExpression)
*/
@Override
public void visit(AndExpression andExpression) {
visitBinaryExpression(andExpression);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.conditional.OrExpression)
*/
@Override
public void visit(OrExpression orExpression) {
visitBinaryExpression(orExpression);
}
@Override
public void visit(Between between) {
between.getLeftExpression().accept(this);
between.getBetweenExpressionStart().accept(this);
between.getBetweenExpressionEnd().accept(this);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.EqualsTo)
*/
@Override
public void visit(EqualsTo equalsTo) {
visitBinaryExpression(equalsTo);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.GreaterThan)
*/
@Override
public void visit(GreaterThan greaterThan) {
visitBinaryExpression(greaterThan);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals)
*/
@Override
public void visit(GreaterThanEquals greaterThanEquals) {
visitBinaryExpression(greaterThanEquals);
}
/*
* We add the content of the inExpression in SelectionJSQL
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.InExpression)
*/
@Override
public void visit(InExpression inExpression) {
//Expression e = inExpression.getLeftExpression();
ItemsList e1 = inExpression.getLeftItemsList();
if (e1 instanceof SubSelect){
((SubSelect)e1).accept((ExpressionVisitor)this);
}
else if (e1 instanceof ExpressionList) {
for (Expression expr : ((ExpressionList)e1).getExpressions()) {
expr.accept(this);
}
}
else if (e1 instanceof MultiExpressionList) {
for (ExpressionList exp : ((MultiExpressionList)e1).getExprList()){
for (Expression expr : ((ExpressionList)exp).getExpressions()) {
expr.accept(this);
}
}
}
}
/*
* We add the content of isNullExpression in SelectionJSQL
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.InExpression)
*/
@Override
public void visit(IsNullExpression isNullExpression) {
isNullExpression.getLeftExpression().accept(this);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.InExpression)
*/
@Override
public void visit(LikeExpression likeExpression) {
visitBinaryExpression(likeExpression);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.MinorThan)
*/
@Override
public void visit(MinorThan minorThan) {
visitBinaryExpression(minorThan);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.MinorThanEquals)
*/
@Override
public void visit(MinorThanEquals minorThanEquals) {
visitBinaryExpression(minorThanEquals);
}
/*
* We do the same procedure for all Binary Expressions
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.expression.operators.relational.NotEqualsTo)
*/
@Override
public void visit(NotEqualsTo notEqualsTo) {
visitBinaryExpression(notEqualsTo);
}
/*
* Visit the column and remove the quotes if they are present(non-Javadoc)
* @see net.sf.jsqlparser.expression.ExpressionVisitor#visit(net.sf.jsqlparser.schema.Column)
*/
@Override
public void visit(Column tableColumn) {
Table table= tableColumn.getTable();
if(table.getName()!=null){
TableJSQL fixTable = new TableJSQL(table);
table.setAlias(fixTable.getAlias());
table.setName(fixTable.getTableName());
table.setSchemaName(fixTable.getSchema());
>>>>>>>
for (WithItem withItem : select.getWithItemsList())
withItem.accept(selectVisitor); |
<<<<<<<
import it.unibz.inf.ontop.model.term.TermFactory;
=======
import it.unibz.inf.ontop.iq.optimizer.MappingIQNormalizer;
>>>>>>>
import it.unibz.inf.ontop.iq.optimizer.MappingIQNormalizer;
import it.unibz.inf.ontop.model.term.TermFactory;
<<<<<<<
private final TermFactory termFactory;
=======
private final MappingIQNormalizer mappingIQNormalizer;
>>>>>>>
private final MappingIQNormalizer mappingIQNormalizer;
private final TermFactory termFactory;
<<<<<<<
private MappingMergerImpl(SpecificationFactory specificationFactory, UnionBasedQueryMerger queryMerger,
TermFactory termFactory) {
=======
private MappingMergerImpl(SpecificationFactory specificationFactory, UnionBasedQueryMerger queryMerger,
MappingIQNormalizer mappingIQNormalizer) {
>>>>>>>
private MappingMergerImpl(SpecificationFactory specificationFactory, UnionBasedQueryMerger queryMerger,
MappingIQNormalizer mappingIQNormalizer, TermFactory termFactory) {
<<<<<<<
this.termFactory = termFactory;
=======
this.mappingIQNormalizer = mappingIQNormalizer;
>>>>>>>
this.mappingIQNormalizer = mappingIQNormalizer;
this.termFactory = termFactory; |
<<<<<<<
public QuestOWLFactory createQueryOWLFactory(String mappingFile) throws Exception {
/*
* Creating the instance of the reasoner using the factory. Remember
* that the RDBMS that contains the data must be already running and
* accepting connections. The HelloWorld and Books tutorials at our wiki
* show you how to do this.
*/
QuestOWLFactory factory = new QuestOWLFactory(new File(mappingFile), createPreferences(mappingFile));
return factory;
}
=======
>>>>>>> |
<<<<<<<
private Constant getConstant(ProjectionElem node, TupleResultSet resSet)
throws OBDAException {
Constant constant = null;
String node_name = node.getSourceName();
ValueExpr ve = null;
if (extMap != null) {
ve = extMap.get(node_name);
if (ve != null && ve instanceof Var)
throw new RuntimeException("Invalid query. Found unbound variable: " + ve);
}
// if (node_name.charAt(0) == '-') {
if (ve instanceof org.eclipse.rdf4j.query.algebra.ValueConstant) {
org.eclipse.rdf4j.query.algebra.ValueConstant vc = (org.eclipse.rdf4j.query.algebra.ValueConstant) ve;
if (vc.getValue() instanceof IRI) {
constant = dfac.getConstantURI(vc.getValue().stringValue());
} else if (vc.getValue() instanceof Literal) {
constant = dfac.getConstantLiteral(vc.getValue().stringValue());
} else {
constant = dfac.getConstantBNode(vc.getValue().stringValue());
}
} else {
constant = resSet.getConstant(node_name);
}
return constant;
}
@Override
=======
private Constant getConstant(ProjectionElem node, TupleResultSet resSet)
throws OBDAException {
Constant constant = null;
String node_name = node.getSourceName();
ValueExpr ve = null;
if (extMap!= null) {
ve = extMap.get(node_name);
if (ve!=null && ve instanceof Var)
throw new RuntimeException ("Invalid query. Found unbound variable: "+ve);
}
if (node_name.charAt(0) == '-') {
org.openrdf.query.algebra.ValueConstant vc = (org.openrdf.query.algebra.ValueConstant) ve;
if (vc.getValue() instanceof URIImpl) {
constant = DATA_FACTORY.getConstantURI(vc.getValue().stringValue());
} else if (vc.getValue() instanceof LiteralImpl) {
constant = DATA_FACTORY.getConstantLiteral(vc.getValue().stringValue());
} else {
constant = DATA_FACTORY.getConstantBNode(vc.getValue().stringValue());
}
} else {
constant = resSet.getConstant(node_name);
}
return constant;
}
@Override
>>>>>>>
private Constant getConstant(ProjectionElem node, TupleResultSet resSet)
throws OBDAException {
Constant constant = null;
String node_name = node.getSourceName();
ValueExpr ve = null;
if (extMap != null) {
ve = extMap.get(node_name);
if (ve != null && ve instanceof Var)
throw new RuntimeException("Invalid query. Found unbound variable: " + ve);
}
// if (node_name.charAt(0) == '-') {
if (ve instanceof org.eclipse.rdf4j.query.algebra.ValueConstant) {
org.eclipse.rdf4j.query.algebra.ValueConstant vc = (org.eclipse.rdf4j.query.algebra.ValueConstant) ve;
if (vc.getValue() instanceof IRI) {
constant = DATA_FACTORY.getConstantURI(vc.getValue().stringValue());
} else if (vc.getValue() instanceof Literal) {
constant = DATA_FACTORY.getConstantLiteral(vc.getValue().stringValue());
} else {
constant = DATA_FACTORY.getConstantBNode(vc.getValue().stringValue());
}
} else {
constant = resSet.getConstant(node_name);
}
return constant;
}
@Override |
<<<<<<<
private ImmutableSet<Variable> nullableVariables = null;
@Nullable
private Boolean isDistinct = null;
=======
private ImmutableSet<Variable> nullableVariables;
@Nullable
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> possibleVariableDefinitions;
>>>>>>>
private ImmutableSet<Variable> nullableVariables;
@Nullable
private Boolean isDistinct;
@Nullable
private ImmutableSet<ImmutableSubstitution<NonVariableTerm>> possibleVariableDefinitions;
<<<<<<<
public IQTree removeDistincts() {
IQProperties properties = getProperties();
return properties.areDistinctAlreadyRemoved()
? this
: getRootNode().removeDistincts(leftChild, rightChild, properties);
}
@Override
=======
public IQTree replaceSubTree(IQTree subTreeToReplace, IQTree newSubTree) {
if (equals(subTreeToReplace))
return newSubTree;
return iqFactory.createBinaryNonCommutativeIQTree(getRootNode(),
leftChild.replaceSubTree(subTreeToReplace, newSubTree),
rightChild.replaceSubTree(subTreeToReplace, newSubTree));
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions() {
if (possibleVariableDefinitions == null)
possibleVariableDefinitions = getRootNode().getPossibleVariableDefinitions(leftChild, rightChild);
return possibleVariableDefinitions;
}
@Override
>>>>>>>
public IQTree removeDistincts() {
IQProperties properties = getProperties();
return properties.areDistinctAlreadyRemoved()
? this
: getRootNode().removeDistincts(leftChild, rightChild, properties);
}
@Override
public IQTree replaceSubTree(IQTree subTreeToReplace, IQTree newSubTree) {
if (equals(subTreeToReplace))
return newSubTree;
return iqFactory.createBinaryNonCommutativeIQTree(getRootNode(),
leftChild.replaceSubTree(subTreeToReplace, newSubTree),
rightChild.replaceSubTree(subTreeToReplace, newSubTree));
}
@Override
public ImmutableSet<ImmutableSubstitution<NonVariableTerm>> getPossibleVariableDefinitions() {
if (possibleVariableDefinitions == null)
possibleVariableDefinitions = getRootNode().getPossibleVariableDefinitions(leftChild, rightChild);
return possibleVariableDefinitions;
}
@Override |
<<<<<<<
private static final long serialVersionUID = 500873858691854474L;
=======
private final Predicate predicate;
>>>>>>>
private static final long serialVersionUID = 500873858691854474L;
<<<<<<<
private IRI iri;
// private final AnnotationPropertyDomainImpl domain;
// private final AnnotationPropertyRangeImpl range;
AnnotationPropertyImpl(String name) {
=======
// TODO: remove public access
public AnnotationPropertyImpl(String name) {
this.predicate = TERM_FACTORY.getAnnotationPropertyPredicate(name);
>>>>>>>
private IRI iri;
AnnotationPropertyImpl(String name) {
<<<<<<<
this.iri = new SimpleRDF().createIRI(name);
// this.domain = new AnnotationPropertyDomainImpl(this);
// this.range = new AnnotationPropertyRangeImpl(this);
=======
>>>>>>>
this.iri = new SimpleRDF().createIRI(name); |
<<<<<<<
: typeFactory.getOptionalDatatype(rdfFactory.createIRI(datatype.stringValue()))
.orElseGet(typeFactory::getUnsupportedDatatype);
c2 = termFactory.getRDFLiteralConstant(l.getLabel(), type);
=======
: typeFactory.getDatatype(rdfFactory.createIRI(datatype.stringValue()));
c2 = termFactory.getConstantLiteral(l.getLabel(), type);
>>>>>>>
: typeFactory.getDatatype(rdfFactory.createIRI(datatype.stringValue()));
c2 = termFactory.getRDFLiteralConstant(l.getLabel(), type); |
<<<<<<<
/**
* Constructor used in cases where unwrapping-with-name-change has been
* invoked and lookup indices need to be updated.
*/
protected BeanDeserializerBase(BeanDeserializerBase src,
UnwrappedPropertyHandler unwrapHandler, BeanPropertyMap renamedProperties,
boolean ignoreAllUnknown)
=======
protected BeanDeserializerBase(BeanDeserializerBase src, NameTransformer unwrapper)
>>>>>>>
/**
* Constructor used in cases where unwrapping-with-name-change has been
* invoked and lookup indices need to be updated.
*/
protected BeanDeserializerBase(BeanDeserializerBase src,
UnwrappedPropertyHandler unwrapHandler, BeanPropertyMap renamedProperties,
boolean ignoreAllUnknown)
<<<<<<<
=======
*
* @since 2.1
>>>>>>>
<<<<<<<
=======
*
* @since 2.2
>>>>>>>
<<<<<<<
=======
*
* @since 2.0
>>>>>>>
<<<<<<<
=======
*
* @since 2.0
>>>>>>>
<<<<<<<
null : _beanProperties.findDefinition(propertyName);
if (_neitherNull(prop, _propertyBasedCreator)) {
=======
null : _beanProperties.find(propertyName);
if (prop == null && _propertyBasedCreator != null) {
>>>>>>>
null : _beanProperties.findDefinition(propertyName);
if (prop == null && _propertyBasedCreator != null) {
<<<<<<<
=======
*
* @since 2.3
>>>>>>>
<<<<<<<
null : _beanProperties.findDefinition(propertyIndex);
if (_neitherNull(prop, _propertyBasedCreator)) {
=======
null : _beanProperties.find(propertyIndex);
if (prop == null && _propertyBasedCreator != null) {
>>>>>>>
null : _beanProperties.findDefinition(propertyIndex);
if (prop == null && _propertyBasedCreator != null) {
<<<<<<<
=======
/* Mutators
/**********************************************************
*/
/**
* Method that can be used to replace an existing property with
* a modified one.
*<p>
* NOTE: only ever use this method if you know what you are doing;
* incorrect usage can break deserializer.
*
* @param original Property to replace
* @param replacement Property to replace it with
*
* @since 2.1
*/
public void replaceProperty(SettableBeanProperty original,
SettableBeanProperty replacement)
{
_beanProperties.replace(original, replacement);
}
/*
/**********************************************************
>>>>>>>
<<<<<<<
=======
*
* @since 2.3
>>>>>>> |
<<<<<<<
import it.unibz.inf.ontop.iq.node.normalization.AscendingSubstitutionNormalizer;
import it.unibz.inf.ontop.iq.node.normalization.AscendingSubstitutionNormalizer.AscendingSubstitutionNormalization;
import it.unibz.inf.ontop.iq.transform.IQTransformer;
=======
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
>>>>>>>
import it.unibz.inf.ontop.iq.transform.IQTreeVisitingTransformer;
import it.unibz.inf.ontop.iq.node.normalization.AscendingSubstitutionNormalizer;
import it.unibz.inf.ontop.iq.node.normalization.AscendingSubstitutionNormalizer.AscendingSubstitutionNormalization; |
<<<<<<<
import it.unibz.inf.ontop.datalog.DatalogFactory;
import it.unibz.inf.ontop.dbschema.*;
=======
import it.unibz.inf.ontop.datalog.Datalog2QueryMappingConverter;
import it.unibz.inf.ontop.datalog.IntermediateQuery2DatalogTranslator;
import it.unibz.inf.ontop.datalog.impl.IntermediateQuery2DatalogTranslatorImpl;
import it.unibz.inf.ontop.injection.OntopMappingSettings;
>>>>>>>
import it.unibz.inf.ontop.datalog.Datalog2QueryMappingConverter;
import it.unibz.inf.ontop.datalog.DatalogFactory;
import it.unibz.inf.ontop.datalog.IntermediateQuery2DatalogTranslator;
import it.unibz.inf.ontop.dbschema.*;
import it.unibz.inf.ontop.injection.OntopMappingSettings;
<<<<<<<
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.spec.mapping.transformer.MappingNormalizer;
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import it.unibz.inf.ontop.substitution.impl.SubstitutionUtilities;
import it.unibz.inf.ontop.substitution.impl.UnifierUtilities;
=======
import it.unibz.inf.ontop.dbschema.DBMetadataTestingTools;
import it.unibz.inf.ontop.spec.mapping.MappingMetadata;
import it.unibz.inf.ontop.spec.mapping.PrefixManager;
import it.unibz.inf.ontop.spec.mapping.transformer.*;
import it.unibz.inf.ontop.spec.mapping.transformer.impl.DefaultMappingTransformer;
import java.util.stream.Stream;
>>>>>>>
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.spec.mapping.MappingMetadata;
import it.unibz.inf.ontop.spec.mapping.PrefixManager;
import it.unibz.inf.ontop.spec.mapping.transformer.*;
import it.unibz.inf.ontop.substitution.SubstitutionFactory;
import it.unibz.inf.ontop.substitution.impl.SubstitutionUtilities;
import it.unibz.inf.ontop.substitution.impl.UnifierUtilities;
import java.util.stream.Stream;
<<<<<<<
private static final BasicDBMetadata DEFAULT_DUMMY_DB_METADATA;
=======
public static final Datalog2QueryMappingConverter DATALOG_2_QUERY_MAPPING_CONVERTER;
public static final DefaultMappingTransformer DEFAULT_MAPPING_TRANSFORMER;
public static final MappingCanonicalRewriter MAPPING_CANONICAL_REWRITER;
public static final MappingSaturator MAPPING_SATURATOR;
public static final ABoxFactIntoMappingConverter A_BOX_FACT_INTO_MAPPING_CONVERTER;
public static final MappingMerger MAPPING_MERGER;
public static final OntopMappingSettings ONTOP_MAPPING_SETTINGS;
public static final MappingSameAsInverseRewriter SAME_AS_INVERSE_REWRITER;
public static final MappingEquivalenceFreeRewriter EQUIVALENCE_FREE_REWRITER;
public static final IntermediateQuery2DatalogTranslator INTERMEDIATE_QUERY_2_DATALOG_TRANSLATOR;
public static final PrefixManager EMPTY_PREFIX_MANAGER;
public static final UriTemplateMatcher EMPTY_URI_TEMPLATE_MATCHER;
public static final MappingMetadata EMPTY_MAPPING_METADATA;
>>>>>>>
private static final BasicDBMetadata DEFAULT_DUMMY_DB_METADATA;
<<<<<<<
ATOM_FACTORY = injector.getInstance(AtomFactory.class);
TERM_FACTORY = injector.getInstance(TermFactory.class);
TYPE_FACTORY = injector.getInstance(TypeFactory.class);
DATALOG_FACTORY = injector.getInstance(DatalogFactory.class);
SUBSTITUTION_FACTORY = injector.getInstance(SubstitutionFactory.class);
RELATION_2_PREDICATE = injector.getInstance(Relation2Predicate.class);
DEFAULT_DUMMY_DB_METADATA = injector.getInstance(DummyBasicDBMetadata.class);
SUBSTITUTION_UTILITIES = injector.getInstance(SubstitutionUtilities.class);
UNIFIER_UTILITIES = injector.getInstance(UnifierUtilities.class);
EMPTY_METADATA = DEFAULT_DUMMY_DB_METADATA.clone();
EMPTY_METADATA.freeze();
=======
DATALOG_2_QUERY_MAPPING_CONVERTER = injector.getInstance(Datalog2QueryMappingConverter.class);
EMPTY_URI_TEMPLATE_MATCHER = UriTemplateMatcher.create(Stream.of());
EMPTY_PREFIX_MANAGER = MAPPING_FACTORY.createPrefixManager(ImmutableMap.of());
EMPTY_MAPPING_METADATA = MAPPING_FACTORY.createMetadata(EMPTY_PREFIX_MANAGER, EMPTY_URI_TEMPLATE_MATCHER);
DEFAULT_MAPPING_TRANSFORMER = injector.getInstance(DefaultMappingTransformer.class);
MAPPING_CANONICAL_REWRITER = injector.getInstance(MappingCanonicalRewriter.class);
MAPPING_SATURATOR= injector.getInstance(MappingSaturator.class);
A_BOX_FACT_INTO_MAPPING_CONVERTER = injector.getInstance(ABoxFactIntoMappingConverter.class);
ONTOP_MAPPING_SETTINGS = injector.getInstance(OntopMappingSettings.class);
MAPPING_MERGER = injector.getInstance(MappingMerger.class);
SAME_AS_INVERSE_REWRITER = injector.getInstance(MappingSameAsInverseRewriter.class);
EQUIVALENCE_FREE_REWRITER = injector.getInstance(MappingEquivalenceFreeRewriter.class);
INTERMEDIATE_QUERY_2_DATALOG_TRANSLATOR = injector.getInstance(IntermediateQuery2DatalogTranslator.class);
>>>>>>>
ATOM_FACTORY = injector.getInstance(AtomFactory.class);
TERM_FACTORY = injector.getInstance(TermFactory.class);
TYPE_FACTORY = injector.getInstance(TypeFactory.class);
DATALOG_FACTORY = injector.getInstance(DatalogFactory.class);
SUBSTITUTION_FACTORY = injector.getInstance(SubstitutionFactory.class);
RELATION_2_PREDICATE = injector.getInstance(Relation2Predicate.class);
DEFAULT_DUMMY_DB_METADATA = injector.getInstance(DummyBasicDBMetadata.class);
DATALOG_2_QUERY_MAPPING_CONVERTER = injector.getInstance(Datalog2QueryMappingConverter.class);
A_BOX_FACT_INTO_MAPPING_CONVERTER = injector.getInstance(ABoxFactIntoMappingConverter.class);
ONTOP_MAPPING_SETTINGS = injector.getInstance(OntopMappingSettings.class);
MAPPING_MERGER = injector.getInstance(MappingMerger.class);
SAME_AS_INVERSE_REWRITER = injector.getInstance(MappingSameAsInverseRewriter.class);
EQUIVALENCE_FREE_REWRITER = injector.getInstance(MappingEquivalenceFreeRewriter.class);
INTERMEDIATE_QUERY_2_DATALOG_TRANSLATOR = injector.getInstance(IntermediateQuery2DatalogTranslator.class);
MAPPING_SATURATOR = injector.getInstance(MappingSaturator.class);
EMPTY_URI_TEMPLATE_MATCHER = UriTemplateMatcher.create(Stream.of(), TERM_FACTORY);
EMPTY_PREFIX_MANAGER = MAPPING_FACTORY.createPrefixManager(ImmutableMap.of());
EMPTY_MAPPING_METADATA = MAPPING_FACTORY.createMetadata(EMPTY_PREFIX_MANAGER, EMPTY_URI_TEMPLATE_MATCHER);
SUBSTITUTION_UTILITIES = injector.getInstance(SubstitutionUtilities.class);
UNIFIER_UTILITIES = injector.getInstance(UnifierUtilities.class);
EMPTY_METADATA = DEFAULT_DUMMY_DB_METADATA.clone();
EMPTY_METADATA.freeze(); |
<<<<<<<
String newSourceQuery = getInstantiatedSQL(m.source.getSQLQuery(), templateColumns, values);
IRIConstant predicateTerm = termFactory.getConstantIRI(
rdfFactory.createIRI(getPredicateName(templateAtom.getTerm(0), values)));
=======
// Cannot build an IRI out of nulls
if (values.contains(null))
continue;
String newSourceQuery = getInstantiatedSQL(m.source.getSQLQuery(), newColumns, templateColumns, values);
ImmutableList<Variable> templateVariables = templateAtom.getTerms().stream()
.filter(t -> t instanceof Variable)
.map(v -> (Variable) v)
.distinct()
.collect(ImmutableCollectors.toList());
// Transforms the result set into a substitution
ImmutableSubstitution<ValueConstant> valueSubstitution = substitutionFactory.getSubstitution(
IntStream.range(0, values.size())
.boxed()
.collect(ImmutableCollectors.toMap(
templateVariables::get,
i -> termFactory.getConstantLiteral(values.get(i)))));
// In a mapping assertion, we create ground terms instead of constants for IRIs
// (so as to guarantee that RDF functions can ALWAYS be lifted after unfolding)
GroundFunctionalTerm predicateTerm = (GroundFunctionalTerm) termFactory.getImmutableUriTemplate(
termFactory.getConstantLiteral(getPredicateName(templateAtom.getTerm(0), values)));
>>>>>>>
// Cannot build an IRI out of nulls
if (values.contains(null))
continue;
String newSourceQuery = getInstantiatedSQL(m.source.getSQLQuery(), templateColumns, values);
IRIConstant predicateTerm = termFactory.getConstantIRI(
rdfFactory.createIRI(getPredicateName(templateAtom.getTerm(0), values))); |
<<<<<<<
import it.unibz.inf.ontop.model.type.*;
=======
import it.unibz.inf.ontop.model.type.NumericRDFDatatype;
import it.unibz.inf.ontop.model.type.RDFDatatype;
import it.unibz.inf.ontop.model.type.TypeFactory;
import it.unibz.inf.ontop.model.vocabulary.OntopInternal;
>>>>>>>
import it.unibz.inf.ontop.model.type.*;
<<<<<<<
import it.unibz.inf.ontop.substitution.ImmutableSubstitution;
import it.unibz.inf.ontop.substitution.impl.ImmutableUnificationTools;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
=======
import it.unibz.inf.ontop.substitution.Substitution;
import it.unibz.inf.ontop.substitution.impl.UnifierUtilities;
import org.apache.commons.rdf.api.RDF;
>>>>>>>
import it.unibz.inf.ontop.substitution.ImmutableSubstitution;
import it.unibz.inf.ontop.substitution.impl.ImmutableUnificationTools;
import it.unibz.inf.ontop.utils.ImmutableCollectors;
import org.apache.commons.rdf.api.RDF;
<<<<<<<
private final RDFTermTypeConstant iriConstant, bnodeConstant;
=======
private final RDF rdfFactory;
>>>>>>>
private final RDFTermTypeConstant iriConstant, bnodeConstant;
private final RDF rdfFactory;
<<<<<<<
ImmutableUnificationTools unificationTools, ExpressionNormalizer normalizer,
ImmutabilityTools immutabilityTools) {
=======
UnifierUtilities unifierUtilities, ExpressionNormalizer normalizer,
ImmutabilityTools immutabilityTools, RDF rdfFactory) {
>>>>>>>
ImmutableUnificationTools unificationTools, ExpressionNormalizer normalizer,
ImmutabilityTools immutabilityTools, RDF rdfFactory) {
<<<<<<<
this.iriConstant = termFactory.getRDFTermTypeConstant(typeFactory.getIRITermType());
this.bnodeConstant = termFactory.getRDFTermTypeConstant(typeFactory.getBlankNodeType());
=======
this.rdfFactory = rdfFactory;
>>>>>>>
this.iriConstant = termFactory.getRDFTermTypeConstant(typeFactory.getIRITermType());
this.bnodeConstant = termFactory.getRDFTermTypeConstant(typeFactory.getBlankNodeType());
this.rdfFactory = rdfFactory;
<<<<<<<
return Optional.empty();
}
=======
throw new RuntimeException("Unexpected term type: " + term);
}
}
private boolean isDouble(Predicate pred) {
return (pred.equals(termFactory.getRequiredTypePredicate(XSD.DOUBLE))
|| pred.equals(termFactory.getRequiredTypePredicate(XSD.FLOAT)));
}
private boolean isNumeric(Predicate pred) {
return (pred instanceof DatatypePredicate)
&& (((DatatypePredicate) pred).getReturnedType() instanceof NumericRDFDatatype);
}
private boolean isNumeric(ValueConstant constant) {
String constantValue = constant.getValue();
RDFDatatype type = typeFactory.getDatatype(rdfFactory.createIRI(constantValue));
return type.isA(OntopInternal.NUMERIC);
>>>>>>>
return Optional.empty();
}
<<<<<<<
if (functionSymbol1.equals(pred2)) {
if (functionSymbol1 instanceof IRIStringTemplateFunctionSymbol) {
=======
if (pred1.equals(pred2)) {
if (pred1 instanceof URITemplatePredicate) {
>>>>>>>
if (functionSymbol1.equals(pred2)) {
if (functionSymbol1 instanceof IRIStringTemplateFunctionSymbol) {
<<<<<<<
return new ExpressionEvaluator(datalogTools, termFactory, typeFactory, unificationTools, normalizer, immutabilityTools);
=======
return new ExpressionEvaluator(datalogTools, termFactory, typeFactory, unifierUtilities, normalizer,
immutabilityTools, rdfFactory);
>>>>>>>
return new ExpressionEvaluator(datalogTools, termFactory, typeFactory, unificationTools, normalizer, immutabilityTools, rdfFactory); |
<<<<<<<
=======
import it.unibz.inf.ontop.io.ModelIOManager;
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
>>>>>>>
import it.unibz.inf.ontop.model.OBDADataFactory;
import it.unibz.inf.ontop.model.OBDAModel;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
<<<<<<<
import it.unibz.inf.ontop.owlrefplatform.core.SQLExecutableQuery;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLConnection;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLFactory;
import it.unibz.inf.ontop.owlrefplatform.owlapi3.QuestOWLStatement;
import java.io.File;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
=======
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
>>>>>>>
import it.unibz.inf.ontop.owlrefplatform.core.SQLExecutableQuery;
import it.unibz.inf.ontop.owlrefplatform.owlapi.*;
<<<<<<<
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));
Properties pref = new Properties();
pref.put(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
pref.put(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
pref.put(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
pref.put(QuestPreferences.REWRITE, QuestConstants.TRUE);
QuestOWLFactory factory = new QuestOWLFactory(new File(obdafile), new QuestPreferences(pref));
QuestOWL reasoner = factory.createReasoner(ontology, new SimpleConfiguration());
QuestOWLConnection qconn = reasoner.getConnection();
QuestOWLStatement st = qconn.createStatement();
String unfolding = ((SQLExecutableQuery)st.getExecutableQuery(query)).getSQL();
st.close();
reasoner.dispose();
return unfolding;
}
/**
* constructs directly the unfolding
*
* @param query
* @return
* @throws Exception
*/
private String getNPDUnfolding(String query) throws Exception {
=======
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));
OBDADataFactory fac = OBDADataFactoryImpl.getInstance();
OBDAModel obdaModel = fac.getOBDAModel();
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load(obdafile);
QuestPreferences pref = new QuestPreferences();
pref.setCurrentValueOf(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
pref.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
pref.setCurrentValueOf(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
pref.setCurrentValueOf(QuestPreferences.REWRITE, QuestConstants.TRUE);
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(pref).build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
QuestOWLConnection qconn = reasoner.getConnection();
QuestOWLStatement st = qconn.createStatement();
String unfolding = st.getUnfolding(query);
st.close();
reasoner.dispose();
return unfolding;
}
/**
* constructs directly the unfolding
*
* @param query
* @return
* @throws Exception
*/
private String getNPDUnfolding(String query) throws Exception {
>>>>>>>
OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));
Properties pref = new Properties();
pref.put(QuestPreferences.DBTYPE, QuestConstants.SEMANTIC_INDEX);
pref.put(QuestPreferences.ABOX_MODE, QuestConstants.CLASSIC);
pref.put(QuestPreferences.REFORMULATION_TECHNIQUE, QuestConstants.TW);
pref.put(QuestPreferences.REWRITE, QuestConstants.TRUE);
QuestOWLFactory factory = new QuestOWLFactory();
QuestOWLConfiguration config = QuestOWLConfiguration.builder()
.nativeOntopMappingFile(obdafile).properties(pref).build();
QuestOWL reasoner = factory.createReasoner(ontology, config);
QuestOWLConnection qconn = reasoner.getConnection();
QuestOWLStatement st = qconn.createStatement();
String unfolding = ((SQLExecutableQuery)st.getExecutableQuery(query)).getSQL();
st.close();
reasoner.dispose();
return unfolding;
}
/**
* constructs directly the unfolding
*
* @param query
* @return
* @throws Exception
*/
private String getNPDUnfolding(String query) throws Exception { |
<<<<<<<
import it.unibz.krdb.obda.model.BuiltinPredicate;
=======
>>>>>>>
<<<<<<<
import it.unibz.krdb.obda.ontology.PropertyExpression;
import it.unibz.krdb.obda.ontology.SomeValuesFrom;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.CQContainmentCheckUnderLIDs;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.EQNormalizer;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.Unifier;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.UnifierUtilities;
=======
import it.unibz.krdb.obda.ontology.ObjectPropertyExpression;
import it.unibz.krdb.obda.ontology.ObjectSomeValuesFrom;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.*;
>>>>>>>
import it.unibz.krdb.obda.ontology.ObjectPropertyExpression;
import it.unibz.krdb.obda.ontology.ObjectSomeValuesFrom;
import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.*;
<<<<<<<
/***
	 * Given a mapping in {@link currentMapping}, this method will
	 * return a new mapping in which no constants appear in the body of
* database predicates. This is done by replacing the constant occurrence
* with a fresh variable, and adding a new equality condition to the body of
* the mapping.
* <p/>
*
* For example, let the mapping m be
* <p/>
* A(x) :- T(x,y,22)
*
* <p>
* Then this method will replace m by the mapping m'
* <p>
* A(x) :- T(x,y,z), EQ(z,22)
*
* @param currentMapping
* @return a new CQ that has been normalized in the way described above.
*/
private static CQIE normalizeConstants(CQIE currentMapping) {
int freshVarCount = 0;
List<Function> newBody = new LinkedList<Function>();
for (Function currentAtom : currentMapping.getBody()) {
if (!(currentAtom.getPredicate() instanceof BuiltinPredicate)) {
Function clone = (Function)currentAtom.clone();
for (int i = 0; i < clone.getTerms().size(); i++) {
Term term = clone.getTerm(i);
if (term instanceof Constant) {
// Found a constant, replacing with a fresh variable
// and adding the new equality atom.
freshVarCount++;
Variable freshVariable = fac.getVariable("?FreshVar" + freshVarCount);
newBody.add(fac.getFunctionEQ(freshVariable, term));
clone.setTerm(i, freshVariable);
}
}
newBody.add(clone);
} else {
newBody.add((Function)currentAtom.clone());
}
}
Function head = (Function)currentMapping.getHead().clone();
return fac.getCQIE(head, newBody);
}
=======
>>>>>>>
<<<<<<<
else
throw new RuntimeException("Unknown type of node in DAG: " + childDescription);
for (CQIE childmapping : originalMappings) {
if (!childmapping.getHead().getFunctionSymbol().equals(childPredicate))
continue;
=======
else {
assert (childDescription instanceof DataSomeValuesFrom);
DataPropertyExpression some = ((DataSomeValuesFrom) childDescription).getProperty();
childPredicate = some.getPredicate();
isClass = false;
isInverse = false; // can never be an inverse
}
List<TMappingRule> childmappings = originalMappingIndex.get(childPredicate);
if (childmappings == null)
continue;
for (TMappingRule childmapping : childmappings) {
>>>>>>>
else {
assert (childDescription instanceof DataSomeValuesFrom);
DataPropertyExpression some = ((DataSomeValuesFrom) childDescription).getProperty();
childPredicate = some.getPredicate();
isClass = false;
isInverse = false; // can never be an inverse
}
List<TMappingRule> childmappings = originalMappingIndex.get(childPredicate);
if (childmappings == null)
continue;
for (TMappingRule childmapping : childmappings) { |
<<<<<<<
=======
import org.semanticweb.ontop.exception.InvalidMappingException;
import org.semanticweb.ontop.exception.InvalidPredicateDeclarationException;
import org.semanticweb.ontop.io.ModelIOManager;
>>>>>>>
import org.semanticweb.ontop.exception.DuplicateMappingException;
import org.semanticweb.ontop.exception.InvalidMappingException;
import org.semanticweb.ontop.exception.InvalidPredicateDeclarationException;
import org.semanticweb.ontop.io.InvalidDataSourceException;
<<<<<<<
=======
	 * Load the OBDA model from an external .obda file
*/
OBDADataFactory fac = OBDADataFactoryImpl.getInstance();
OBDAModel obdaModel = fac.getOBDAModel();
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load(obdaFile);
/*
>>>>>>>
<<<<<<<
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
// TEST preference.setCurrentValueOf(QuestPreferences.T_MAPPINGS, QuestConstants.FALSE); // Disable T_Mappings
ImplicitDBConstraints constr = new ImplicitDBConstraints(usrConstrinFile);
p.put(QuestPreferences.DB_CONSTRAINTS, constr);
=======
QuestPreferences preference = new QuestPreferences();
preference.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
// TEST preference.setCurrentValueOf(QuestPreferences.T_MAPPINGS, QuestConstants.FALSE); // Disable T_Mappings
>>>>>>>
Properties p = new Properties();
p.setProperty(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
// TEST preference.setCurrentValueOf(QuestPreferences.T_MAPPINGS, QuestConstants.FALSE); // Disable T_Mappings
if (manualKeys) {
ImplicitDBConstraints constr = new ImplicitDBConstraints(usrConstrinFile);
p.put(QuestPreferences.DB_CONSTRAINTS, constr);
} |
<<<<<<<
Multimap<Predicate, List<Integer>> pkeys = HashMultimap.create();
=======
Map<Predicate, List<Integer>> pkeys = new HashMap<>();
>>>>>>>
Multimap<Predicate, List<Integer>> pkeys = HashMultimap.create();
<<<<<<<
// primary keys
List<Integer> pkeyIdx = new LinkedList<Integer>();
for (int columnidx = 1; columnidx <= def.getNumOfAttributes(); columnidx++) {
=======
List<Integer> pkeyIdx = new LinkedList<>();
for (int columnidx = 1; columnidx <= def.getAttributes().size(); columnidx++) {
>>>>>>>
// primary keys
List<Integer> pkeyIdx = new LinkedList<>();
for (int columnidx = 1; columnidx <= def.getAttributes().size(); columnidx++) {
<<<<<<<
// unique constraints
for (int columnidx = 1; columnidx <= def.getNumOfAttributes(); columnidx++) {
Attribute column = def.getAttribute(columnidx);
if (column.isUnique()) {
pkeys.put(newatom.getFunctionSymbol(), ImmutableList.of(columnidx));
}
}
=======
if (!pkeyIdx.isEmpty())
pkeys.put(newAtomPredicate, pkeyIdx);
>>>>>>>
if (!pkeyIdx.isEmpty()) {
pkeys.put(newAtomPredicate, pkeyIdx);
}
// unique constraints
for (int columnidx = 1; columnidx <= def.getAttributes().size(); columnidx++) {
Attribute column = def.getAttribute(columnidx);
if (column.isUnique()) {
pkeys.put(newAtomPredicate, ImmutableList.of(columnidx));
}
} |
<<<<<<<
package it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht;
import it.unibz.krdb.obda.ontology.Description;
import it.unibz.krdb.obda.ontology.Ontology;
import java.util.Set;
/**
* This is the interface for the class TBoxReasoner where we are able to retrieve all the connection built in our DAG
*
*
*/
public interface TBoxReasoner {
public Set<Set<Description>> getDirectChildren(Description desc, boolean named);
public Set<Set<Description>> getDirectParents(Description desc, boolean named);
public Set<Set<Description>> getDescendants(Description desc, boolean named);
public Set<Set<Description>> getAncestors(Description desc, boolean named);
public Set<Description> getEquivalences(Description desc, boolean named);
public DAG getDAG ();
public Ontology getSigmaOntology();
}
=======
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
package it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht;
import it.unibz.krdb.obda.ontology.Description;
import java.util.Set;
/**
* This is the interface for the class TBoxReasoner where we are able to retrieve all the connection built in our DAG
*
*
*/
public interface TBoxReasoner {
public Set<Set<Description>> getDirectChildren(Description desc);
public Set<Set<Description>> getDirectParents(Description desc);
public Set<Set<Description>> getDescendants(Description desc);
public Set<Set<Description>> getAncestors(Description desc);
public Set<Description> getEquivalences(Description description);
public void getNode();
}
>>>>>>>
/*
* Copyright (C) 2009-2013, Free University of Bozen Bolzano
* This source code is available under the terms of the Affero General Public
* License v3.
*
* Please see LICENSE.txt for full license terms, including the availability of
* proprietary exceptions.
*/
package it.unibz.krdb.obda.owlrefplatform.core.dagjgrapht;
import it.unibz.krdb.obda.ontology.Description;
import it.unibz.krdb.obda.ontology.Ontology;
import java.util.Set;
/**
* This is the interface for the class TBoxReasoner where we are able to retrieve all the connection built in our DAG
*
*
*/
public interface TBoxReasoner {
public Set<Set<Description>> getDirectChildren(Description desc, boolean named);
public Set<Set<Description>> getDirectParents(Description desc, boolean named);
public Set<Set<Description>> getDescendants(Description desc, boolean named);
public Set<Set<Description>> getAncestors(Description desc, boolean named);
public Set<Description> getEquivalences(Description desc, boolean named);
public DAG getDAG ();
public Ontology getSigmaOntology();
} |
<<<<<<<
import xyz.elevated.frequency.check.impl.fly.*;
=======
import xyz.elevated.frequency.check.impl.fly.FlyA;
import xyz.elevated.frequency.check.impl.fly.FlyB;
import xyz.elevated.frequency.check.impl.fly.FlyC;
import xyz.elevated.frequency.check.impl.fly.FlyD;
import xyz.elevated.frequency.check.impl.hitbox.HitboxA;
>>>>>>>
import xyz.elevated.frequency.check.impl.fly.*;
import xyz.elevated.frequency.check.impl.hitbox.HitboxA;
<<<<<<<
.put(KillAuraG.class, new KillAuraG(playerData))
=======
.put(HitboxA.class, new HitboxA(playerData))
>>>>>>>
.put(KillAuraG.class, new KillAuraG(playerData))
.put(HitboxA.class, new HitboxA(playerData)) |
<<<<<<<
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
ROLE_ALREADY_EXIST(BAD_REQUEST.value(), "Role already exist!", BadRequestException.class);
=======
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
CIRCUIT_BREAK_ACTIVE(SERVICE_UNAVAILABLE.value(), "Circuit break enabled", ServerErrorException.class);
>>>>>>>
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
ROLE_ALREADY_EXIST(BAD_REQUEST.value(), "Role already exist!", BadRequestException.class),
CIRCUIT_BREAK_ACTIVE(SERVICE_UNAVAILABLE.value(), "Circuit break enabled", ServerErrorException.class); |
<<<<<<<
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
CORS_INTERCEPTOR_NOT_API_LIFE_CYCLE(BAD_REQUEST.value(), "The CORS Interceptor must of Lifecycle type API", BadRequestException.class),
CORS_INTERCEPTOR_ALREADY_ASSIGNED_TO_THIS_API(BAD_REQUEST.value(), "A CORS Interceptor already assigned to this API", BadRequestException.class);
=======
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
ROLE_ALREADY_EXIST(BAD_REQUEST.value(), "Role already exist!", BadRequestException.class),
CIRCUIT_BREAK_ACTIVE(SERVICE_UNAVAILABLE.value(), "Circuit break enabled", ServerErrorException.class),
SCOPE_INVALID_OPERATION(BAD_REQUEST.value(), "Operation with id '{}' does not exist", BadRequestException.class),
SCOPE_INVALID_PLAN(BAD_REQUEST.value(), "Plan id with '{}' does not exist", BadRequestException.class),
SCOPE_OPERATION_NOT_IN_API(BAD_REQUEST.value(), "Operation '{}' not in Api '{}'", BadRequestException.class),
SCOPE_PLAN_NOT_IN_API(BAD_REQUEST.value(), "Plan '{}' not in Api '{}'", BadRequestException.class),
SCOPE_INVALID_NAME(BAD_REQUEST.value(), "A Scope with the provided name already exists", BadRequestException.class),
SCOPE_NO_OPERATION_FOUND(BAD_REQUEST.value(), "A Scope must have at least one Operation", BadRequestException.class),
;
>>>>>>>
RESPONSE_TYPE_NOT_FOUND(BAD_REQUEST.value(), "response_type not found", BadRequestException.class),
ROLE_ALREADY_EXIST(BAD_REQUEST.value(), "Role already exist!", BadRequestException.class),
CIRCUIT_BREAK_ACTIVE(SERVICE_UNAVAILABLE.value(), "Circuit break enabled", ServerErrorException.class),
SCOPE_INVALID_OPERATION(BAD_REQUEST.value(), "Operation with id '{}' does not exist", BadRequestException.class),
SCOPE_INVALID_PLAN(BAD_REQUEST.value(), "Plan id with '{}' does not exist", BadRequestException.class),
SCOPE_OPERATION_NOT_IN_API(BAD_REQUEST.value(), "Operation '{}' not in Api '{}'", BadRequestException.class),
SCOPE_PLAN_NOT_IN_API(BAD_REQUEST.value(), "Plan '{}' not in Api '{}'", BadRequestException.class),
SCOPE_INVALID_NAME(BAD_REQUEST.value(), "A Scope with the provided name already exists", BadRequestException.class),
SCOPE_NO_OPERATION_FOUND(BAD_REQUEST.value(), "A Scope must have at least one Operation", BadRequestException.class),
CORS_INTERCEPTOR_NOT_API_LIFE_CYCLE(BAD_REQUEST.value(), "The CORS Interceptor must of Lifecycle type API", BadRequestException.class),
CORS_INTERCEPTOR_ALREADY_ASSIGNED_TO_THIS_API(BAD_REQUEST.value(), "A CORS Interceptor already assigned to this API", BadRequestException.class); |
<<<<<<<
=======
/**
* Helper method for detecting Java14-added new {@code Record} types
*
* @since 2.12
*/
public static boolean isRecordType(Class<?> cls) {
Class<?> parent = cls.getSuperclass();
return (parent != null) && "java.lang.Record".equals(parent.getName());
}
/**
* @since 2.7
*/
>>>>>>>
/**
* Helper method for detecting Java14-added new {@code Record} types
*/
public static boolean isRecordType(Class<?> cls) {
Class<?> parent = cls.getSuperclass();
return (parent != null) && "java.lang.Record".equals(parent.getName());
} |
<<<<<<<
package br.com.conductor.heimdall.core.dto.persist;
/*-
* =========================LICENSE_START==================================
* heimdall-core
* ========================================================================
* Copyright (C) 2018 Conductor Tecnologia SA
* ========================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ==========================LICENSE_END===================================
*/
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import br.com.conductor.heimdall.core.dto.ReferenceIdDTO;
import br.com.conductor.heimdall.core.entity.AccessToken;
import br.com.conductor.heimdall.core.enums.Status;
import lombok.Data;
/**
* Class that represents the persist for a {@link AccessToken}.
*
* @author Filipe Germano
*
*/
@Data
public class AccessTokenPersist implements Serializable {
private static final long serialVersionUID = -9130167171077204284L;
@Size(max = 250)
private String code;
@NotNull
private ReferenceIdDTO app;
private LocalDateTime expiredDate;
@NotNull
@Size(min = 1)
private List<ReferenceIdDTO> plans;
private Status status;
}
=======
==================LICENSE_START==================================
* heimdall-core
* ========================================================================
* Copyright (C) 2018 Conductor Tecnologia SA
* ========================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ==========================LICENSE_END===================================
*/
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import br.com.conductor.heimdall.core.dto.ReferenceIdDTO;
import br.com.conductor.heimdall.core.entity.AccessToken;
import br.com.conductor.heimdall.core.enums.Status;
import lombok.Data;
/**
* Class that represents the persist for a {@link AccessToken}.
*
* @author Filipe Germano
*
*/
@Data
public class AccessTokenPersist implements Serializable {
private static final long serialVersionUID = -9130167171077204284L;
@Size(max = 250)
private String code;
@NotNull
private ReferenceIdDTO app;
private LocalDateTime expiredDate;
private List<ReferenceIdDTO> plans;
private Status status;
}
=======
package br.com.conductor.heimdall.core.dto.persist;
/*-
* =========================LICENSE_START==================================
* heimdall-core
* ========================================================================
* Copyright (C) 2018 Conductor Tecnologia SA
* ========================================================================
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ==========================LICENSE_END===================================
*/
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import br.com.conductor.heimdall.core.dto.ReferenceIdDTO;
import br.com.conductor.heimdall.core.entity.AccessToken;
import br.com.conductor.heimdall.core.enums.Status;
import lombok.Data;
/**
* Class that represents the persist for a {@link AccessToken}.
*
* @author Filipe Germano
*
*/
@Data
public class AccessTokenPersist implements Serializable {
private static final long serialVersionUID = -9130167171077204284L;
@Size(max = 250)
private String code;
@NotNull
private ReferenceIdDTO app;
private LocalDateTime expiredDate;
private List<ReferenceIdDTO> plans;
private Status status;
}
>>>>>>>
package br.com.conductor.heimdall.core.dto.persist;
/*-
* =========================LICENSE_START==================================
* heimdall-core
* ========================================================================
* Copyright (C) 2018 Conductor Tecnologia SA
* ========================================================================
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ==========================LICENSE_END===================================
*/
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.List;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import br.com.conductor.heimdall.core.dto.ReferenceIdDTO;
import br.com.conductor.heimdall.core.entity.AccessToken;
import br.com.conductor.heimdall.core.enums.Status;
import lombok.Data;
/**
* Class that represents the persist for a {@link AccessToken}.
*
* @author Filipe Germano
*
*/
@Data
public class AccessTokenPersist implements Serializable {
private static final long serialVersionUID = -9130167171077204284L;
@Size(max = 250)
private String code;
@NotNull
private ReferenceIdDTO app;
private LocalDateTime expiredDate;
@NotNull
@Size(min = 1)
private List<ReferenceIdDTO> plans;
private Status status;
} |
<<<<<<<
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.n52.iceland.i18n.I18NDAO;
=======
import org.n52.iceland.ds.I18NDAO;
import org.n52.iceland.exception.ows.OwsExceptionReport;
>>>>>>>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.n52.iceland.exception.ows.OwsExceptionReport;
import org.n52.iceland.i18n.I18NDAO;
<<<<<<<
import org.n52.iceland.ogc.ows.OwsExceptionReport;
import org.n52.iceland.ogc.ows.ServiceIdentificationFactory;
=======
import org.n52.iceland.service.Configurator;
>>>>>>>
import org.n52.iceland.ogc.ows.ServiceIdentificationFactory; |
<<<<<<<
import java.time.LocalDateTime;
import br.com.conductor.heimdall.core.util.LocalDateTimeSerializer;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
=======
>>>>>>>
import java.time.LocalDateTime;
import br.com.conductor.heimdall.core.util.LocalDateTimeSerializer;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
<<<<<<<
=======
import java.time.LocalDateTime;
>>>>>>>
import java.time.LocalDateTime;
<<<<<<<
=======
private String insertedOnDate = LocalDateTime.now().toString();
>>>>>>> |
<<<<<<<
Api api = new Api(10L, "foo", "v1", "fooDescription", "/foo", false, LocalDateTime.now(), new LinkedList<Resource>(), Status.ACTIVE, null, null, environments, null, null);
=======
Api api = new Api(10L, "foo", "v1", "fooDescription", "/foo", LocalDateTime.now(), new HashSet<>(), Status.ACTIVE, null, null, environments, null, null);
>>>>>>>
Api api = new Api(10L, "foo", "v1", "fooDescription", "/foo", false, LocalDateTime.now(), new HashSet<>(), Status.ACTIVE, null, null, environments, null, null); |
<<<<<<<
public static final String PRIVILEGE_READ_LDAP = "hasAuthority('READ_LDAP')";
public static final String PRIVILEGE_UPDATE_LDAP = "hasAuthority('UPDATE_LDAP')";
=======
public static final String PRIVILEGE_READ_METRICS = "hasAuthority('READ_METRICS')";
>>>>>>>
public static final String PRIVILEGE_READ_LDAP = "hasAuthority('READ_LDAP')";
public static final String PRIVILEGE_UPDATE_LDAP = "hasAuthority('UPDATE_LDAP')";
public static final String PRIVILEGE_READ_METRICS = "hasAuthority('READ_METRICS')"; |
<<<<<<<
private BookmarkReader reader;
private HashMap<String, HashMap<Integer, ArrayList<Long>>> userTagTimes ;
=======
private BookmarkReader userTweetReader;
private BookmarkReader socialTweetReader;
private HashMap<String, HashMap<String, ArrayList<Long>>> userTagTimes ;
>>>>>>>
private String filename;
private BookmarkReader reader;
private HashMap<String, HashMap<Integer, ArrayList<Long>>> userTagTimes ;
<<<<<<<
/**
* @param tweetFilename
* @param networkFilename
* @param userInfoPath
* @param trainSize
* @param sampleSize
*/
public SocialCalculator(String tweetFilename, String networkFilename, int trainSize, int sampleSize) {
this.sampleSize = sampleSize;
=======
/**
*
* @param userTweetFilename
* @param socialTweetFilename
* @param networkFilename
* @param userInfoPath
* @param trainSize
* @param sampleSize
*/
public SocialCalculator(String userTweetFilename, String socialTweetFilename, String networkFilename, String userInfoPath, int trainSize, int sampleSize) {
this.sampleSize = sampleSize;
>>>>>>>
public SocialCalculator(String userTweetFilename, String networkFilename, int trainSize, int testSize) {
this.filename = userTweetFilename;
<<<<<<<
reader = new BookmarkReader(trainSize, false);
reader.readFile(tweetFilename);
List<Bookmark> bookmarkList = reader.getBookmarks();
this.users = reader.getUsers();
this.sampleSize = bookmarkList.size();
this.trainSize = (int) ((int)bookmarkList.size() * 0.9);
this.userTagTimes = getUserTagTime(bookmarkList.subList(0, this.trainSize));
this.idNameMap = reader.getUsers();
=======
this.userInfoPath = userInfoPath;
userTweetReader = new BookmarkReader(trainSize, false);
userTweetReader.readFile(userTweetFilename);
this.users = userTweetReader.getUsers();
this.tags = userTweetReader.getTags();
this.idNameMap = userTweetReader.getUsers();
this.nameIdMap = this.getNameIdMap(idNameMap);
this.addBookmarks(userTweetReader.getBookmarks().subList(0, this.trainSize));
if (socialTweetFilename != null){
socialTweetReader = new BookmarkReader(0, false);
socialTweetReader.readFile(socialTweetFilename);
this.addBookmarks(socialTweetReader.getBookmarks());
}
>>>>>>>
reader = new BookmarkReader(trainSize, false);
reader.readFile(userTweetFilename);
this.users = reader.getUsers();
this.idNameMap = reader.getUsers();
// initialise the predictor with the basic training dataset
this.addBookmarks(reader.getBookmarks().subList(0, this.trainSize));
<<<<<<<
/**
*
=======
/**
>>>>>>>
/**
<<<<<<<
private HashMap<String, HashMap<Integer, ArrayList<Long>>> getUserTagTime(List<Bookmark> bookmarkList){
HashMap<String, HashMap<Integer, ArrayList<Long>>> userTagTimes = new HashMap<String, HashMap<Integer,ArrayList<Long>>>();
=======
private void addBookmarks(List<Bookmark> bookmarkList){
if (this.userTagTimes == null){
this.userTagTimes = new HashMap<String, HashMap<String,ArrayList<Long>>>();
}
>>>>>>>
private void addBookmarks(List<Bookmark> bookmarkList){
if (this.userTagTimes == null){
this.userTagTimes = new HashMap<String, HashMap<Integer,ArrayList<Long>>>();
}
<<<<<<<
Bookmark data = reader.getBookmarks().get(i);
Map<Integer, Double> map = getRankedTagList(data.getUserID(), data.getTimestampAsLong());
=======
Bookmark data = userTweetReader.getBookmarks().get(i);
Map<String, Double> map = getRankedTagList(data.getUserID(), data.getTimestampAsLong());
>>>>>>>
Bookmark data = reader.getBookmarks().get(i);
Map<Integer, Double> map = getRankedTagList(data.getUserID(), data.getTimestampAsLong());
<<<<<<<
reader.setTestLines(reader.getBookmarks().subList(trainSize, reader.getBookmarks().size()));
PredictionFileWriter writer = new PredictionFileWriter(reader, predictionValues);
//writer.writeFile(this.twee + "_social")
return reader;
=======
userTweetReader.setTestLines(userTweetReader.getBookmarks().subList(trainSize, userTweetReader.getBookmarks().size()));
PredictionFileWriter writer = new PredictionFileWriter(userTweetReader, predictionValues);
return userTweetReader;
>>>>>>>
reader.setTestLines(reader.getBookmarks().subList(trainSize, reader.getBookmarks().size()));
PredictionFileWriter writer = new PredictionFileWriter(reader, predictionValues);
return reader; |
<<<<<<<
import minegame159.meteorclient.utils.Utils;
import net.minecraft.client.gui.screen.ingame.HandledScreen;
=======
import net.minecraft.client.gui.screen.ingame.ContainerScreen;
import net.minecraft.container.SlotActionType;
>>>>>>>
import net.minecraft.client.gui.screen.ingame.HandledScreen;
<<<<<<<
if(ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if(Asimov.get() && !(mc.currentScreen instanceof HandledScreen<?>)){
=======
if (ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if (Asimov.get() && !(mc.currentScreen instanceof ContainerScreen<?>)) {
>>>>>>>
if (ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if (Asimov.get() && !(mc.currentScreen instanceof HandledScreen<?>)) {
<<<<<<<
if(ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if((mc.player.getOffHandStack().getItem() != Items.TOTEM_OF_UNDYING || (mc.player.getHealth() + mc.player.getAbsorptionAmount() > health.get())
&& (mc.player.getOffHandStack().getItem() != getItem()) && !(mc.currentScreen instanceof HandledScreen<?>))){
=======
if (ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if ((mc.player.getOffHandStack().getItem() != Items.TOTEM_OF_UNDYING || (mc.player.getHealth() + mc.player.getAbsorptionAmount() > health.get())
&& (mc.player.getOffHandStack().getItem() != getItem()) && !(mc.currentScreen instanceof ContainerScreen<?>))) {
>>>>>>>
if (ModuleManager.INSTANCE.get(AutoTotem.class).getLocked()) return;
if ((mc.player.getOffHandStack().getItem() != Items.TOTEM_OF_UNDYING || (mc.player.getHealth() + mc.player.getAbsorptionAmount() > health.get())
&& (mc.player.getOffHandStack().getItem() != getItem()) && !(mc.currentScreen instanceof HandledScreen<?>))) { |
<<<<<<<
if (FriendManager.INSTANCE.attack((PlayerEntity) entity)) {
render(event, entity, playersColor.get());
}else{
render(event, entity, friendsColor.get());
}
}
else {
switch (entity.getType().getSpawnGroup()) {
=======
render(event, entity, FriendManager.INSTANCE.getColor((PlayerEntity) entity, playersColor.get()));
} else {
switch (entity.getType().getCategory()) {
>>>>>>>
render(event, entity, FriendManager.INSTANCE.getColor((PlayerEntity) entity, playersColor.get()));
} else {
switch (entity.getType().getSpawnGroup()) { |
<<<<<<<
@Inject(method = "applyFog", at = @At("HEAD"), cancellable = true)
private static void onApplyFog(Camera camera, BackgroundRenderer.FogType fogType, float viewDistance, boolean thickFog, CallbackInfo info) {
if (ModuleManager.INSTANCE.isActive(AntiFog.class) || ModuleManager.INSTANCE.isActive(XRay.class)) info.cancel();
=======
@Inject(method = "applyFog", at = @At("TAIL"))
private void onApplyFog(Camera camera, int i, CallbackInfo info) {
if (ModuleManager.INSTANCE.isActive(AntiFog.class) || ModuleManager.INSTANCE.isActive(XRay.class)) {
GlStateManager.disableFog();
}
>>>>>>>
@Inject(method = "applyFog", at = @At("TAIL"))
private static void onApplyFog(Camera camera, BackgroundRenderer.FogType fogType, float viewDistance, boolean thickFog, CallbackInfo info) {
if (ModuleManager.INSTANCE.isActive(AntiFog.class) || ModuleManager.INSTANCE.isActive(XRay.class)) {
GlStateManager.disableFog();
} |
<<<<<<<
import net.minecraft.text.LiteralText;
import net.minecraft.text.OrderedText;
=======
>>>>>>>
import net.minecraft.text.LiteralText;
import net.minecraft.text.OrderedText;
<<<<<<<
@Shadow public abstract int getWidth();
@Shadow @Final private List<ChatHudLine<Text>> messages;
=======
@Shadow @Final private List<ChatHudLine> messages;
>>>>>>>
@Shadow @Final private List<ChatHudLine<Text>> messages;
<<<<<<<
private void onAddMessage(Text text, int messageId, int timestamp, boolean bl, CallbackInfo info) {
String message = text.getString();
// Ignore players
for (String name : Ignore.ignoredPlayers) {
if (message.contains("<" + name + ">")) {
info.cancel();
return;
}
}
// Anti Spam
AntiSpam antiSpam = ModuleManager.INSTANCE.get(AntiSpam.class);
for (int i = 0; i < antiSpam.getDepth(); i++) {
if (checkMsg(message, timestamp, messageId, i)) {
if (antiSpam.isMoveToBottom() && i != 0) {
ChatHudLine msg = visibleMessages.remove(i);
visibleMessages.add(0, msg);
messages.add(0, msg);
}
info.cancel();
return;
}
}
//Friend Colour
if (ModuleManager.INSTANCE.get(FriendColor.class).isActive() && !message.equals(lastMessage)) {
String convert = message;
List<Friend> friends = FriendManager.INSTANCE.getAll();
for (Friend friend : friends) {
if (convert.contains(friend.name)) {
convert = convert.replaceAll(friend.name, "§d" + friend.name + "§r");
}
}
lastMessage = convert;
Utils.sendMessage(convert);
lastMessage = null;
=======
private void onAddMessage(Text message, int messageId, int timestamp, boolean bl, CallbackInfo info) {
// Better Chat
if (ModuleManager.INSTANCE.get(BetterChat.class).onMsg(message.getString(), messageId, timestamp, messages, visibleMessages)) {
>>>>>>>
private void onAddMessage(Text message, int messageId, int timestamp, boolean bl, CallbackInfo info) {
// Better Chat
if (ModuleManager.INSTANCE.get(BetterChat.class).onMsg(message.getString(), messageId, timestamp, messages, visibleMessages)) {
<<<<<<<
private boolean checkMsg(String newMsg, int newTimestamp, int newId, int msgI) {
ChatHudLine<OrderedText> msg = visibleMessages.size() > msgI ? visibleMessages.get(msgI) : null;
if (msg == null) return false;
String msgString = msg.getText().toString();
if (msgString.equals(newMsg)) {
msgString += Formatting.GRAY + " (2)";
((IChatHudLine) msg).setText(new LiteralText(msgString));
((IChatHudLine) msg).setTimestamp(newTimestamp);
((IChatHudLine) msg).setId(newId);
return true;
} else {
Matcher matcher = Pattern.compile(".*(\\([0-9]+\\)$)").matcher(msgString);
if (matcher.matches()) {
String group = matcher.group(1);
int number = Integer.parseInt(group.substring(1, group.length() - 1));
int i = msgString.lastIndexOf(group);
msgString = msgString.substring(0, i - Formatting.GRAY.toString().length() - 1);
if (msgString.equals(newMsg)) {
msgString += Formatting.GRAY + " (" + (number + 1) + ")";
((IChatHudLine) msg).setText(new LiteralText(msgString));
((IChatHudLine) msg).setTimestamp(newTimestamp);
((IChatHudLine) msg).setId(newId);
return true;
}
}
return false;
}
=======
@Redirect(method = "addMessage(Lnet/minecraft/text/Text;IIZ)V", at = @At(value = "INVOKE", target = "Ljava/util/List;size()I"))
private int addMessageListSizeProxy(List<ChatHudLine> list) {
BetterChat betterChat = ModuleManager.INSTANCE.get(BetterChat.class);
return betterChat.isLongerChat() && betterChat.getChatLength() > 100 ? 1 : list.size();
>>>>>>>
@Redirect(method = "addMessage(Lnet/minecraft/text/Text;IIZ)V", at = @At(value = "INVOKE", target = "Ljava/util/List;size()I"))
private int addMessageListSizeProxy(List<ChatHudLine> list) {
BetterChat betterChat = ModuleManager.INSTANCE.get(BetterChat.class);
return betterChat.isLongerChat() && betterChat.getChatLength() > 100 ? 1 : list.size(); |
<<<<<<<
if (Asimov.get() && !(mc.currentScreen instanceof HandledScreen<?>)) {
=======
if ((Asimov.get() || noTotems) && !(mc.currentScreen instanceof ContainerScreen<?>)) {
>>>>>>>
if ((Asimov.get() || noTotems) && !(mc.currentScreen instanceof HandledScreen<?>)) { |
<<<<<<<
import minegame159.meteorclient.utils.InvUtils;
import minegame159.meteorclient.utils.Utils;
=======
import minegame159.meteorclient.utils.Chat;
>>>>>>>
import minegame159.meteorclient.utils.InvUtils;
import minegame159.meteorclient.utils.Chat; |
<<<<<<<
import minegame159.meteorclient.events.RenderEvent;
import net.minecraft.client.MinecraftClient;
import net.minecraft.client.render.Camera;
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.util.math.Quaternion;
=======
import org.lwjgl.opengl.GL11;
>>>>>>>
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.util.math.Quaternion; |
<<<<<<<
drawInfo("FPS: ", ((IMinecraftClient) MinecraftClient.getInstance()).getFps() + "", y);
y += Utils.getTextHeight() + 2;
=======
drawInfo("FPS: ", MinecraftClient.getCurrentFps() + "", y);
y += MeteorClient.FONT.getHeight() + 2;
>>>>>>>
drawInfo("FPS: ", ((IMinecraftClient) MinecraftClient.getInstance()).getFps() + "", y);
y += MeteorClient.FONT.getHeight() + 2;
<<<<<<<
private void drawInfoRight(String text1, String text2, int y, int text1Color) {
drawInfo(text1, text2, mc.getWindow().getScaledWidth() - Utils.getTextWidth(text1) - Utils.getTextWidth(text2) - 2, y, text1Color);
=======
private void drawInfoRight(String text1, String text2, int y, Color text1Color) {
drawInfo(text1, text2, mc.window.getScaledWidth() - MeteorClient.FONT.getStringWidth(text1) - MeteorClient.FONT.getStringWidth(text2) - 2, y, text1Color);
>>>>>>>
private void drawInfoRight(String text1, String text2, int y, Color text1Color) {
drawInfo(text1, text2, mc.getWindow().getScaledWidth() - MeteorClient.FONT.getStringWidth(text1) - MeteorClient.FONT.getStringWidth(text2) - 2, y, text1Color);
<<<<<<<
drawPosition(event.screenWidth, "Nether Pos: ", y, mc.player.getX() / 8.0, mc.player.getY() / 8.0, mc.player.getZ() / 8.0);
y -= Utils.getTextHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= Utils.getTextHeight() + 2;
=======
drawPosition(event.screenWidth, "Nether Pos: ", y, mc.player.x / 8.0, mc.player.y / 8.0, mc.player.z / 8.0);
y -= MeteorClient.FONT.getHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.x, mc.player.y, mc.player.z);
y -= MeteorClient.FONT.getHeight() + 2;
>>>>>>>
drawPosition(event.screenWidth, "Nether Pos: ", y, mc.player.getX() / 8.0, mc.player.getY() / 8.0, mc.player.getZ() / 8.0);
y -= MeteorClient.FONT.getHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= MeteorClient.FONT.getHeight() + 2;
<<<<<<<
drawPosition(event.screenWidth, "Overworld Pos: ", y, mc.player.getX() * 8.0, mc.player.getY() * 8.0, mc.player.getZ() * 8.0);
y -= Utils.getTextHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= Utils.getTextHeight() + 2;
=======
drawPosition(event.screenWidth, "Overworld Pos: ", y, mc.player.x * 8.0, mc.player.y * 8.0, mc.player.z * 8.0);
y -= MeteorClient.FONT.getHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.x, mc.player.y, mc.player.z);
y -= MeteorClient.FONT.getHeight() + 2;
>>>>>>>
drawPosition(event.screenWidth, "Overworld Pos: ", y, mc.player.getX() * 8.0, mc.player.getY() * 8.0, mc.player.getZ() * 8.0);
y -= MeteorClient.FONT.getHeight() + 2;
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= MeteorClient.FONT.getHeight() + 2;
<<<<<<<
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= Utils.getTextHeight() + 2;
=======
drawPosition(event.screenWidth, "Pos: ", y, mc.player.x, mc.player.y, mc.player.z);
y -= MeteorClient.FONT.getHeight() + 2;
>>>>>>>
drawPosition(event.screenWidth, "Pos: ", y, mc.player.getX(), mc.player.getY(), mc.player.getZ());
y -= MeteorClient.FONT.getHeight() + 2;
<<<<<<<
drawInfoRight(statusEffect.getName().asString(), " " + (statusEffectInstance.getAmplifier() + 1) + " (" + StatusEffectUtil.durationToString(statusEffectInstance, 1) + ")", y, statusEffect.getColor());
y -= Utils.getTextHeight() + 2;
=======
drawInfoRight(statusEffect.method_5560().asString(), " " + (statusEffectInstance.getAmplifier() + 1) + " (" + StatusEffectUtil.durationToString(statusEffectInstance, 1) + ")", y, new Color(statusEffect.getColor()));
y -= MeteorClient.FONT.getHeight() + 2;
>>>>>>>
drawInfoRight(statusEffect.getName().asString(), " " + (statusEffectInstance.getAmplifier() + 1) + " (" + StatusEffectUtil.durationToString(statusEffectInstance, 1) + ")", y, new Color(statusEffect.getColor()));
y -= MeteorClient.FONT.getHeight() + 2; |
<<<<<<<
=======
import net.minecraft.client.gui.screen.ChatScreen;
import net.minecraft.client.gui.screen.Screen;
>>>>>>>
import net.minecraft.client.gui.screen.ChatScreen;
import net.minecraft.client.gui.screen.Screen;
<<<<<<<
if (ModuleManager.INSTANCE.isActive(InvMove.class)) {
InputUtil.Key key2 = InputUtil.fromKeyCode(key, scancode);
KeyBinding.setKeyPressed(key2, i == GLFW.GLFW_PRESS);
=======
if (ModuleManager.INSTANCE.isActive(InvMove.class) && !(client.currentScreen instanceof ChatScreen)) {
InputUtil.KeyCode keyCode = InputUtil.getKeyCode(key, scancode);
KeyBinding.setKeyPressed(keyCode, i == GLFW.GLFW_PRESS);
>>>>>>>
if (ModuleManager.INSTANCE.isActive(InvMove.class) && !(client.currentScreen instanceof ChatScreen)) {
InputUtil.Key key2 = InputUtil.fromKeyCode(key, scancode);
KeyBinding.setKeyPressed(key2, i == GLFW.GLFW_PRESS); |
<<<<<<<
if (delayLeft > 0) {
delayLeft--;
return;
} else {
delayLeft = delay.get();
}
if(place.get()) {
Iterator<AbstractClientPlayerEntity> validEntities = mc.world.getPlayers().stream()
.filter(FriendManager.INSTANCE::attack)
.filter(entityPlayer -> !entityPlayer.getDisplayName().equals(mc.player.getDisplayName()))
.filter(entityPlayer -> mc.player.distanceTo(entityPlayer) <= 10)
.collect(Collectors.toList())
.iterator();
AbstractClientPlayerEntity target;
if (validEntities.hasNext()) {
target = validEntities.next();
} else {
return;
}
for (AbstractClientPlayerEntity i = null; validEntities.hasNext(); i = validEntities.next()) {
if (i == null) continue;
if (mc.player.distanceTo(i) < mc.player.distanceTo(target)) {
target = i;
}
}
List<BlockPos> validBlocks = findValidBlocks(target);
BlockPos bestBlock = null;
for (BlockPos blockPos : validBlocks) {
BlockPos pos = blockPos.up();
if (DamageCalcUtils.crystalDamage(target, new Vec3d(pos.getX(), pos.getY(), pos.getZ())) > minDamage.get()) {
pos = blockPos.up();
if (bestBlock == null) {
bestBlock = blockPos;
continue;
}
BlockPos pos2 = bestBlock.up();
if (DamageCalcUtils.crystalDamage(target, new Vec3d(pos.getX(), pos.getY(), pos.getZ()))
> DamageCalcUtils.crystalDamage(target, new Vec3d(pos2.getX(), pos2.getY(), pos2.getZ()))) {
bestBlock = blockPos;
}
}
}
if (bestBlock != null) {
if(autoSwitch.get() && mc.player.getMainHandStack().getItem() != Items.END_CRYSTAL){
int slot = InvUtils.findItemWithCount(Items.END_CRYSTAL).slot;
if(slot != -1 && slot < 9){
if (spoofChange.get()) preSlot = mc.player.inventory.selectedSlot;
mc.player.inventory.selectedSlot = slot;
}
}
Hand hand = Hand.MAIN_HAND;
if (mc.player.getMainHandStack().getItem() != Items.END_CRYSTAL && mc.player.getOffHandStack().getItem() == Items.END_CRYSTAL) hand = Hand.OFF_HAND;
else if (mc.player.getMainHandStack().getItem() != Items.END_CRYSTAL && mc.player.getOffHandStack().getItem() != Items.END_CRYSTAL) {return;}
placeBlock(bestBlock, hand);
if (smartDelay.get()){
if (DamageCalcUtils.crystalDamage(target, target.getPos()) - DamageCalcUtils.crystalDamage(target, new Vec3d(bestBlock.getX(), bestBlock.getY(), bestBlock.getZ())) < 10) {
delayLeft = 10;
}
}
}
if (spoofChange.get() && preSlot != mc.player.inventory.selectedSlot) mc.player.inventory.selectedSlot = preSlot;
}
=======
>>>>>>>
<<<<<<<
BlockPos pos = entity.getBlockPos();
Vec3d vec1 = new Vec3d(pos.getX(), pos.getY(), pos.getZ());
PlayerMoveC2SPacket.LookOnly packet = new PlayerMoveC2SPacket.LookOnly(Utils.getNeededYaw(vec1), Utils.getNeededPitch(vec1), mc.player.isOnGround());
=======
Vec3d vec1 = entity.getPos();
PlayerMoveC2SPacket.LookOnly packet = new PlayerMoveC2SPacket.LookOnly(Utils.getNeededYaw(vec1), Utils.getNeededPitch(vec1), mc.player.onGround);
>>>>>>>
Vec3d vec1 = entity.getPos();
PlayerMoveC2SPacket.LookOnly packet = new PlayerMoveC2SPacket.LookOnly(Utils.getNeededYaw(vec1), Utils.getNeededPitch(vec1), mc.player.onGround);
<<<<<<<
private List<BlockPos> findValidBlocks(AbstractClientPlayerEntity target){
Iterator<BlockPos> allBlocks = getRange(mc.player.getBlockPos(), placeRange.get()).iterator();
List<BlockPos> validBlocks = new ArrayList<>();
for(BlockPos i = null; allBlocks.hasNext(); i = allBlocks.next()){
if(i == null) continue;
if((mc.world.getBlockState(i).getBlock() == Blocks.BEDROCK
|| mc.world.getBlockState(i).getBlock() == Blocks.OBSIDIAN)
&& isEmpty(i.up())){
if (!strict.get()) {
validBlocks.add(i);
} else if (strict.get() && isEmpty(i.up(2))) {
validBlocks.add(i);
}
}
}
validBlocks.sort(Comparator.comparingDouble(value -> {
BlockPos pos = value.up();
return DamageCalcUtils.crystalDamage(target, new Vec3d(pos.getX(), pos.getY(), pos.getZ()));
}));
validBlocks.removeIf(blockpos -> {
BlockPos pos = blockpos.up();
return DamageCalcUtils.crystalDamage(mc.player, new Vec3d(pos.getX(), pos.getY(), pos.getZ())) > maxDamage.get();
});
Collections.reverse(validBlocks);
return validBlocks;
}
private List<BlockPos> getRange(BlockPos player, double range){
List<BlockPos> allBlocks = new ArrayList<>();
for(double i = player.getX() - range; i < player.getX() + range; i++){
for(double j = player.getZ() - range; j < player.getZ() + range; j++){
for(int k = player.getY() - 3; k < player.getY() + 3; k++){
BlockPos x = new BlockPos(i, k, j);
allBlocks.add(x);
=======
private void findValidBlocks(AbstractClientPlayerEntity target){
bestBlock = null;
playerPos = mc.player.getPos();
for(double i = playerPos.x - placeRange.get(); i < playerPos.x + placeRange.get(); i++){
for(double j = playerPos.z - placeRange.get(); j < playerPos.z + placeRange.get(); j++){
for(double k = playerPos.y - 3; k < playerPos.y + 3; k++){
pos = new Vec3d(i, k, j);
if((mc.world.getBlockState(new BlockPos(pos)).getBlock() == Blocks.BEDROCK
|| mc.world.getBlockState(new BlockPos(pos)).getBlock() == Blocks.OBSIDIAN)
&& isEmpty(new BlockPos(pos.add(0, 1, 0)))){
if (!strict.get()) {
if (bestBlock == null) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
if (bestDamage < DamageCalcUtils.crystalDamage(target, pos.add(0.5, 1.5, 0.5))
&& (DamageCalcUtils.crystalDamage(mc.player, pos.add(0.5,1.5, 0.5)) < minDamage.get() || mode.get() == Mode.suicide)) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
} else if (strict.get() && isEmpty(new BlockPos(pos.add(0, 2, 0)))) {
if (bestBlock == null) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
if (bestDamage
< DamageCalcUtils.crystalDamage(target, pos.add(0.5, 1.5, 0.5))
&& (DamageCalcUtils.crystalDamage(mc.player, pos.add( 0.5, 1.5, 0.5)) < minDamage.get()) || mode.get() == Mode.suicide) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
}
}
>>>>>>>
private void findValidBlocks(AbstractClientPlayerEntity target){
bestBlock = null;
playerPos = mc.player.getPos();
for(double i = playerPos.x - placeRange.get(); i < playerPos.x + placeRange.get(); i++){
for(double j = playerPos.z - placeRange.get(); j < playerPos.z + placeRange.get(); j++){
for(double k = playerPos.y - 3; k < playerPos.y + 3; k++){
pos = new Vec3d(i, k, j);
if((mc.world.getBlockState(new BlockPos(pos)).getBlock() == Blocks.BEDROCK
|| mc.world.getBlockState(new BlockPos(pos)).getBlock() == Blocks.OBSIDIAN)
&& isEmpty(new BlockPos(pos.add(0, 1, 0)))){
if (!strict.get()) {
if (bestBlock == null) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
if (bestDamage < DamageCalcUtils.crystalDamage(target, pos.add(0.5, 1.5, 0.5))
&& (DamageCalcUtils.crystalDamage(mc.player, pos.add(0.5,1.5, 0.5)) < minDamage.get() || mode.get() == Mode.suicide)) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
} else if (strict.get() && isEmpty(new BlockPos(pos.add(0, 2, 0)))) {
if (bestBlock == null) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
if (bestDamage
< DamageCalcUtils.crystalDamage(target, pos.add(0.5, 1.5, 0.5))
&& (DamageCalcUtils.crystalDamage(mc.player, pos.add( 0.5, 1.5, 0.5)) < minDamage.get()) || mode.get() == Mode.suicide) {
bestBlock = pos;
bestDamage = DamageCalcUtils.crystalDamage(target, bestBlock.add(0.5, 1.5, 0.5));
}
}
} |
<<<<<<<
import net.minecraft.client.util.math.Matrix4f;
=======
import net.minecraft.client.render.Camera;
import net.minecraft.entity.Entity;
>>>>>>>
import net.minecraft.client.render.Camera;
import net.minecraft.entity.Entity;
import net.minecraft.client.util.math.Matrix4f;
<<<<<<<
public static int getTextWidth(String text) {
return MeteorClient.TEXT_RENDERER.getStringWidth(text);
}
public static int getTextHeight() {
return MeteorClient.TEXT_RENDERER.getHeight() + 2;
}
public static void drawText(String text, float x, float y, int color) {
MeteorClient.TEXT_RENDERER.drawString(text, x, y + 1, color);
}
public static void drawText(Matrix4f matrix4f, String text, float x, float y, int color) {
MeteorClient.TEXT_RENDERER.drawString(matrix4f, text, x, y + 1, color);
}
public static void drawTextWithShadow(String text, float x, float y, int color) {
MeteorClient.TEXT_RENDERER.drawStringWithShadow(text, x, y + 1, color);
}
public static void drawTextWithShadow(Matrix4f matrix4f, String text, float x, float y, int color) {
MeteorClient.TEXT_RENDERER.drawStringWithShadow(matrix4f, text, x, y + 1, color);
}
=======
>>>>>>> |
<<<<<<<
Map<Enchantment, Integer> enchantments = EnchantmentHelper.get(itemStack);
=======
Map<Enchantment, Integer> enchantments = EnchantmentHelper.getEnchantments(itemStack);
Map<Enchantment, Integer> enchantmentsToShowScale = new HashMap<>();
for (Enchantment enchantment : displayedEnchantments.get()) {
if (enchantments.containsKey(enchantment)) {
enchantmentsToShowScale.put(enchantment, enchantments.get(enchantment));
}
}
>>>>>>>
Map<Enchantment, Integer> enchantments = EnchantmentHelper.get(itemStack);
Map<Enchantment, Integer> enchantmentsToShowScale = new HashMap<>();
for (Enchantment enchantment : displayedEnchantments.get()) {
if (enchantments.containsKey(enchantment)) {
enchantmentsToShowScale.put(enchantment, enchantments.get(enchantment));
}
}
<<<<<<<
Map<Enchantment, Integer> enchantments = EnchantmentHelper.get(itemStack);
=======
Map<Enchantment, Integer> enchantments = EnchantmentHelper.getEnchantments(itemStack);
Map<Enchantment, Integer> enchantmentsToShow = new HashMap<>();
for (Enchantment enchantment : displayedEnchantments.get()) {
if (enchantments.containsKey(enchantment)) {
enchantmentsToShow.put(enchantment, enchantments.get(enchantment));
}
}
>>>>>>>
Map<Enchantment, Integer> enchantments = EnchantmentHelper.get(itemStack);
Map<Enchantment, Integer> enchantmentsToShow = new HashMap<>();
for (Enchantment enchantment : displayedEnchantments.get()) {
if (enchantments.containsKey(enchantment)) {
enchantmentsToShow.put(enchantment, enchantments.get(enchantment));
}
} |
<<<<<<<
private Listener<TickEvent> onTick = new Listener<>(event -> {
if (onlyOnGround.get() && !mc.player.isOnGround()) return;
=======
private final Listener<TickEvent> onTick = new Listener<>(event -> {
if (onlyOnGround.get() && !mc.player.onGround) return;
>>>>>>>
private final Listener<TickEvent> onTick = new Listener<>(event -> {
if (onlyOnGround.get() && !mc.player.isOnGround()) return; |
<<<<<<<
import minegame159.meteorclient.modules.render.NoRender;
=======
import minegame159.meteorclient.utils.Outlines;
>>>>>>>
import minegame159.meteorclient.modules.render.NoRender;
import minegame159.meteorclient.utils.Outlines;
<<<<<<<
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.entity.Entity;
import net.minecraft.util.math.BlockPos;
=======
import net.minecraft.client.render.entity.EntityRenderDispatcher;
import net.minecraft.util.hit.HitResult;
import org.spongepowered.asm.mixin.Final;
>>>>>>>
import net.minecraft.client.render.entity.EntityRenderDispatcher;
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.entity.Entity;
import net.minecraft.util.math.BlockPos;
import org.spongepowered.asm.mixin.Final;
<<<<<<<
@Inject(method = "checkEmpty", at = @At("HEAD"), cancellable = true)
private void onCheckEmpty(MatrixStack matrixStack, CallbackInfo info) {
info.cancel();
}
@Inject(method = "renderWeather", at = @At("HEAD"), cancellable = true)
private void onRenderWeather(LightmapTextureManager manager, float f, double d, double e, double g, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "renderEntity", at = @At("HEAD"))
private void onRenderEntitiesHead(Entity entity, double cameraX, double cameraY, double cameraZ, float tickDelta, MatrixStack matrix, VertexConsumerProvider vertexConsumers, CallbackInfo info) {
=======
@Shadow @Final private EntityRenderDispatcher entityRenderDispatcher;
@Inject(method = "loadEntityOutlineShader", at = @At("TAIL"))
private void onLoadEntityOutlineShader(CallbackInfo info) {
Outlines.load();
}
@Inject(method = "renderEntities", at = @At("HEAD"))
private void onRenderEntitiesHead(Camera camera, VisibleRegion visibleRegion, float tickDelta, CallbackInfo info) {
>>>>>>>
@Shadow @Final private EntityRenderDispatcher entityRenderDispatcher;
@Inject(method = "loadEntityOutlineShader", at = @At("TAIL"))
private void onLoadEntityOutlineShader(CallbackInfo info) {
Outlines.load();
}
@Inject(method = "checkEmpty", at = @At("HEAD"), cancellable = true)
private void onCheckEmpty(MatrixStack matrixStack, CallbackInfo info) {
info.cancel();
}
@Inject(method = "renderWeather", at = @At("HEAD"), cancellable = true)
private void onRenderWeather(LightmapTextureManager manager, float f, double d, double e, double g, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "renderEntity", at = @At("HEAD"))
private void onRenderEntitiesHead(Entity entity, double cameraX, double cameraY, double cameraZ, float tickDelta, MatrixStack matrix, VertexConsumerProvider vertexConsumers, CallbackInfo info) {
<<<<<<<
@Inject(method = "drawBlockOutline", at = @At("HEAD"), cancellable = true)
private void onDrawHighlightedBlockOutline(MatrixStack matrixStack, VertexConsumer vertexConsumer, Entity entity, double d, double e, double f, BlockPos blockPos, BlockState blockState, CallbackInfo info) {
=======
@Inject(method = "renderEntities", at = @At(value = "INVOKE", target = "Lnet/minecraft/client/render/WorldRenderer;canDrawEntityOutlines()Z"))
private void onRenderEntitiesOutlines(Camera camera, VisibleRegion visibleRegion, float tickDelta, CallbackInfo info) {
Outlines.render(entityRenderDispatcher, visibleRegion, camera, tickDelta);
}
@Inject(method = "drawEntityOutlinesFramebuffer", at = @At(value = "INVOKE", target = "Lnet/minecraft/client/gl/Framebuffer;drawInternal(IIZ)V"))
private void onDrawEntityOutlinesFramebuffer(CallbackInfo info) {
Outlines.renderFbo();
}
@Inject(method = "onResized", at = @At("HEAD"))
private void onResized(int i, int j, CallbackInfo info) {
Outlines.onResized(i, j);
}
@Inject(method = "drawHighlightedBlockOutline", at = @At("HEAD"), cancellable = true)
private void onDrawHighlightedBlockOutline(Camera camera, HitResult hit, int renderPass, CallbackInfo info) {
>>>>>>>
@Inject(method = "renderEntities", at = @At(value = "INVOKE", target = "Lnet/minecraft/client/render/WorldRenderer;canDrawEntityOutlines()Z"))
private void onRenderEntitiesOutlines(Camera camera, VisibleRegion visibleRegion, float tickDelta, CallbackInfo info) {
Outlines.render(entityRenderDispatcher, visibleRegion, camera, tickDelta);
}
@Inject(method = "drawEntityOutlinesFramebuffer", at = @At(value = "INVOKE", target = "Lnet/minecraft/client/gl/Framebuffer;drawInternal(IIZ)V"))
private void onDrawEntityOutlinesFramebuffer(CallbackInfo info) {
Outlines.renderFbo();
}
@Inject(method = "onResized", at = @At("HEAD"))
private void onResized(int i, int j, CallbackInfo info) {
Outlines.onResized(i, j);
}
@Inject(method = "drawBlockOutline", at = @At("HEAD"), cancellable = true)
private void onDrawHighlightedBlockOutline(MatrixStack matrixStack, VertexConsumer vertexConsumer, Entity entity, double d, double e, double f, BlockPos blockPos, BlockState blockState, CallbackInfo info) { |
<<<<<<<
Vec3d vec2 = entity.getPos().add(0, entity.getEyeHeight(entity.getPose()), 0);
double y = (entity.getBoundingBox().y2 - entity.getBoundingBox().y1) / 2.0;
RenderUtils.line(vec1.x - (mc.cameraEntity.getX() - event.offsetX), vec1.y - (mc.cameraEntity.getY() - event.offsetY), vec1.z - (mc.cameraEntity.getZ() - event.offsetZ), vec2.x, vec2.y - y, vec2.z, color);
=======
double x = entity.x;
double y = entity.y;
double z = entity.z;
double height = entity.getBoundingBox().y2 - entity.getBoundingBox().y1;
if (target.get() == Target.Head) y += height;
else if (target.get() == Target.Body) y += height / 2;
RenderUtils.line(vec1.x - (mc.cameraEntity.x - event.offsetX), vec1.y - (mc.cameraEntity.y - event.offsetY), vec1.z - (mc.cameraEntity.z - event.offsetZ), x, y, z, color);
if (mode.get() == Mode.Stem) RenderUtils.line(x, entity.y, z, x, entity.y + height, z, color);
>>>>>>>
double x = entity.getX();
double y = entity.getY();
double z = entity.getZ();
double height = entity.getBoundingBox().y2 - entity.getBoundingBox().y1;
if (target.get() == Target.Head) y += height;
else if (target.get() == Target.Body) y += height / 2;
RenderUtils.line(vec1.x - (mc.cameraEntity.getX() - event.offsetX), vec1.y - (mc.cameraEntity.getY() - event.offsetY), vec1.z - (mc.cameraEntity.getZ() - event.offsetZ), x, y, z, color);
if (mode.get() == Mode.Stem) RenderUtils.line(x, entity.getY(), z, x, entity.getY() + height, z, color);
<<<<<<<
import minegame159.meteorclient.modules.ModuleManager;
import minegame159.meteorclient.modules.movement.SafeWalk;
import minegame159.meteorclient.modules.movement.Scaffold;
=======
import net.minecraft.entity.ItemEntity;
>>>>>>>
import minegame159.meteorclient.modules.ModuleManager;
import minegame159.meteorclient.modules.movement.SafeWalk;
import minegame159.meteorclient.modules.movement.Scaffold;
import net.minecraft.entity.ItemEntity;
<<<<<<<
@Inject(method = "clipAtLedge", at = @At("HEAD"), cancellable = true)
protected void clipAtLedge(CallbackInfoReturnable<Boolean> info) {
Scaffold scaffold = ModuleManager.INSTANCE.get(Scaffold.class);
if (ModuleManager.INSTANCE.isActive(SafeWalk.class) || (scaffold.isActive() && scaffold.hasSafeWalk())) {
info.setReturnValue(true);
}
}
=======
@Inject(method = "dropItem(Lnet/minecraft/item/ItemStack;ZZ)Lnet/minecraft/entity/ItemEntity;", at = @At("HEAD"))
private void onDropItem(ItemStack stack, boolean bl, boolean bl2, CallbackInfoReturnable<ItemEntity> info) {
MeteorClient.EVENT_BUS.post(EventStore.dropItemEvent(stack));
}
>>>>>>>
@Inject(method = "clipAtLedge", at = @At("HEAD"), cancellable = true)
protected void clipAtLedge(CallbackInfoReturnable<Boolean> info) {
Scaffold scaffold = ModuleManager.INSTANCE.get(Scaffold.class);
if (ModuleManager.INSTANCE.isActive(SafeWalk.class) || (scaffold.isActive() && scaffold.hasSafeWalk())) {
info.setReturnValue(true);
}
}
@Inject(method = "dropItem(Lnet/minecraft/item/ItemStack;ZZ)Lnet/minecraft/entity/ItemEntity;", at = @At("HEAD"))
private void onDropItem(ItemStack stack, boolean bl, boolean bl2, CallbackInfoReturnable<ItemEntity> info) {
MeteorClient.EVENT_BUS.post(EventStore.dropItemEvent(stack));
} |
<<<<<<<
for(int i = 9; i < 36; i++){
if(material.get() == materialPreference.None && mc.player.inventory.getStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()
&& (mc.player.inventory.getStack(i).getMaxDamage() - mc.player.inventory.getStack(i).getDamage()) > 11){
=======
for(int i = 0; i < 36; i++){
if ((mc.player.inventory.getInvStack(i).getMaxDamage() - mc.player.inventory.getInvStack(i).getDamage()) <= 11) continue;
if(material.get() == materialPreference.None && mc.player.inventory.getInvStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()){
>>>>>>>
for(int i = 0; i < 36; i++){
if ((mc.player.inventory.getInvStack(i).getMaxDamage() - mc.player.inventory.getInvStack(i).getDamage()) <= 11) continue;
if(material.get() == materialPreference.None && mc.player.inventory.getInvStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()){
<<<<<<<
}else if(material.get() == materialPreference.Best){
if(mc.player.inventory.getStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()
&& (mc.player.inventory.getStack(i).getMaxDamage() - mc.player.inventory.getStack(i).getDamage()) > 11){
if(score < Math.round(mc.player.inventory.getStack(i).getMiningSpeedMultiplier(blockState))){
score = Math.round(mc.player.inventory.getStack(i).getMiningSpeedMultiplier(blockState));
=======
}else if(material.get() == materialPreference.Best && blockState != null){
if(mc.player.inventory.getInvStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()){
if(score < Math.round(mc.player.inventory.getInvStack(i).getMiningSpeed(blockState))){
score = Math.round(mc.player.inventory.getInvStack(i).getMiningSpeed(blockState));
>>>>>>>
}else if(material.get() == materialPreference.Best && blockState != null){
if(mc.player.inventory.getInvStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()){
if(score < Math.round(mc.player.inventory.getInvStack(i).getMiningSpeed(blockState))){
score = Math.round(mc.player.inventory.getInvStack(i).getMiningSpeed(blockState));
<<<<<<<
for(int i = 9; i < 36; i++){
if(mc.player.inventory.getStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()
&& (mc.player.inventory.getStack(i).getMaxDamage() - mc.player.inventory.getStack(i).getDamage()) > 11){
=======
for(int i = 0; i < 36; i++){
if(mc.player.inventory.getInvStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()
&& (mc.player.inventory.getInvStack(i).getMaxDamage() - mc.player.inventory.getInvStack(i).getDamage()) > 11){
>>>>>>>
for(int i = 0; i < 36; i++){
if(mc.player.inventory.getStack(i).getItem().getClass() == mc.player.getMainHandStack().getItem().getClass()
&& (mc.player.inventory.getStack(i).getMaxDamage() - mc.player.inventory.getStack(i).getDamage()) > 11){
<<<<<<<
ItemStack itemStack = mc.player.inventory.getStack(i);
if (!isEffectiveOn(itemStack.getItem(), blockState)) continue;
=======
ItemStack itemStack = mc.player.inventory.getInvStack(i);
if (!isEffectiveOn(itemStack.getItem(), blockState.getBlock()) || (itemStack.getMaxDamage() - itemStack.getDamage() <= 11)) continue;
>>>>>>>
ItemStack itemStack = mc.player.inventory.getStack(i);
if (!isEffectiveOn(itemStack.getItem(), blockState) || (itemStack.getMaxDamage() - itemStack.getDamage() <= 11)) continue; |
<<<<<<<
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.client.util.math.Vector3f;
=======
>>>>>>>
import net.minecraft.client.util.math.MatrixStack;
import net.minecraft.client.util.math.Vector3f;
<<<<<<<
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Matrix4f;
=======
>>>>>>>
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.Matrix4f;
<<<<<<<
private void invertBobViewWhenHurt(float f) {
if (this.client.getCameraEntity() instanceof LivingEntity) {
LivingEntity livingEntity = (LivingEntity)this.client.getCameraEntity();
float g = (float)livingEntity.hurtTime - f;
float i;
Matrix4f m;
if (livingEntity.getHealth() <= 0.0F) {
i = Math.min((float)livingEntity.deathTime + f, 20.0F);
m = new Matrix4f(Vector3f.POSITIVE_Z.getDegreesQuaternion(40.0F - 8000.0F / (i + 200.0F)));
m.invert();
RenderSystem.multMatrix(m);
}
if (g < 0.0F) {
return;
}
g /= (float)livingEntity.maxHurtTime;
g = MathHelper.sin(g * g * g * g * 3.1415927F);
i = livingEntity.knockbackVelocity;
m = new Matrix4f(Vector3f.POSITIVE_Y.getDegreesQuaternion(-i));
m.invert();
RenderSystem.multMatrix(m);
m = new Matrix4f(Vector3f.POSITIVE_Z.getDegreesQuaternion(-g * 14.0F));
m.invert();
RenderSystem.multMatrix(m);
m = new Matrix4f(Vector3f.POSITIVE_Y.getDegreesQuaternion(i));
m.invert();
RenderSystem.multMatrix(m);
}
}
private void invertBobView(float f) {
if (client.options.bobView && client.getCameraEntity() instanceof PlayerEntity) {
PlayerEntity playerEntity = (PlayerEntity)this.client.getCameraEntity();
float g = playerEntity.horizontalSpeed - playerEntity.prevHorizontalSpeed;
float h = -(playerEntity.horizontalSpeed + g * f);
float i = MathHelper.lerp(f, playerEntity.prevStrideDistance, playerEntity.strideDistance);
RenderSystem.translated(-(MathHelper.sin(h * 3.1415927F) * i * 0.5F), -(-Math.abs(MathHelper.cos(h * 3.1415927F) * i)), 0.0D);
Matrix4f m = new Matrix4f(Vector3f.POSITIVE_Z.getDegreesQuaternion(MathHelper.sin(h * 3.1415927F) * i * 3.0F));
m.invert();
RenderSystem.multMatrix(m);
m = new Matrix4f(Vector3f.POSITIVE_X.getDegreesQuaternion(Math.abs(MathHelper.cos(h * 3.1415927F - 0.2F) * i) * 5.0F));
m.invert();
RenderSystem.multMatrix(m);
}
}
=======
>>>>>>> |
<<<<<<<
import minegame159.meteorclient.utils.Utils;
import net.minecraft.client.gui.screen.ingame.HandledScreen;
=======
import net.minecraft.client.gui.screen.ingame.ContainerScreen;
import net.minecraft.container.SlotActionType;
>>>>>>>
import net.minecraft.client.gui.screen.ingame.HandledScreen; |
<<<<<<<
if (entity instanceof EndCrystalEntity && mc.player.distanceTo(entity) < range.get()) {
=======
if (entity instanceof EnderCrystalEntity && mc.player.distanceTo(entity) < range.get() && crystalLog.get()) {
>>>>>>>
if (entity instanceof EndCrystalEntity && mc.player.distanceTo(entity) < range.get() && crystalLog.get()) { |
<<<<<<<
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JacksonException;
=======
>>>>>>>
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.core.JacksonException;
<<<<<<<
* A {@link TypeDeserializer} capable of deducing polymorphic types based
* on the properties available Deduction is limited to the <i>names</i> of
* direct child properties (not their values or, consequently, any nested descendants).
* Exceptions will be thrown if not enough unique information is present to select a
* single subtype.
=======
* A {@link TypeDeserializer} capable of deducing polymorphic types based on the fields available. Deduction
* is limited to the <i>names</i> of child fields (not their values or, consequently, any nested descendants).
* Exceptions will be thrown if not enough unique information is present to select a single subtype.
* <p>
* The current deduction process <b>does not</b> support pojo-hierarchies such that the
* absence of child fields infers a parent type. That is, every deducible subtype
* MUST have some unique fields and the input data MUST contain said unique fields
* to provide a <i>positive match</i>.
>>>>>>>
* A {@link TypeDeserializer} capable of deducing polymorphic types based
* on the fields available. Deduction is limited to the <i>names</i> of
* child properties (not their values or, consequently, any nested descendants).
* Exceptions will be thrown if not enough unique information is present to select a
* single subtype.
* <p>
* The current deduction process <b>does not</b> support pojo-hierarchies such that the
* absence of child fields infers a parent type. That is, every deducible subtype
* MUST have some unique fields and the input data MUST contain said unique fields
* to provide a <i>positive match</i>.
<<<<<<<
public AsDeductionTypeDeserializer(DeserializationContext ctxt,
JavaType bt, TypeIdResolver idRes, JavaType defaultImpl,
Collection<NamedType> subtypes) {
super(bt, idRes, null, false, defaultImpl);
propertyBitIndex = new HashMap<>();
subtypeFingerprints = buildFingerprints(ctxt, subtypes);
=======
public AsDeductionTypeDeserializer(JavaType bt, TypeIdResolver idRes, JavaType defaultImpl, DeserializationConfig config, Collection<NamedType> subtypes) {
super(bt, idRes, null, false, defaultImpl, null);
fieldBitIndex = new HashMap<>();
subtypeFingerprints = buildFingerprints(config, subtypes);
>>>>>>>
public AsDeductionTypeDeserializer(DeserializationContext ctxt,
JavaType bt, TypeIdResolver idRes, JavaType defaultImpl,
Collection<NamedType> subtypes)
{
super(bt, idRes, null, false, defaultImpl, null);
propertyBitIndex = new HashMap<>();
subtypeFingerprints = buildFingerprints(ctxt, subtypes); |
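A minimal, hypothetical sketch of the deduction behaviour described in the AsDeductionTypeDeserializer record above (the Shape/Circle/Rect names are invented for illustration; assumes Jackson 2.12+, where JsonTypeInfo.Id.DEDUCTION is available):
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;
public class DeductionDemo {
    // No type id is written or read; the subtype is deduced purely from
    // which property names are present in the incoming JSON.
    @JsonTypeInfo(use = JsonTypeInfo.Id.DEDUCTION)
    @JsonSubTypes({ @JsonSubTypes.Type(Circle.class), @JsonSubTypes.Type(Rect.class) })
    public interface Shape {}
    public static class Circle implements Shape { public double radius; }
    public static class Rect implements Shape { public double w, h; }
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // "radius" uniquely fingerprints Circle; "w"/"h" would select Rect.
        Shape s = mapper.readValue("{\"radius\": 2.0}", Shape.class);
        System.out.println(s.getClass().getSimpleName()); // prints Circle
    }
}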
<<<<<<<
private static final DimensionType THE_NETHER = ((IDimensionType) DimensionType.getOverworldDimensionType()).getNether();
private static final DimensionType THE_END = ((IDimensionType) DimensionType.getOverworldDimensionType()).getEnd();
private final SettingGroup sgGeneral = settings.getDefaultGroup();
=======
private final SettingGroup sgArmor = settings.createGroup("Armor", "armor-enabled", "Armor HUD", true);
>>>>>>>
private static final DimensionType THE_NETHER = ((IDimensionType) DimensionType.getOverworldDimensionType()).getNether();
private static final DimensionType THE_END = ((IDimensionType) DimensionType.getOverworldDimensionType()).getEnd();
private final SettingGroup sgArmor = settings.createGroup("Armor", "armor-enabled", "Armor HUD", true);
<<<<<<<
mc.getItemRenderer().renderGuiItemIcon(itemStack, x, y);
mc.getItemRenderer().renderGuiItemOverlay(mc.textRenderer, itemStack, x, y);
=======
mc.getItemRenderer().renderGuiItem(itemStack, x, y);
>>>>>>>
mc.getItemRenderer().renderGuiItem(itemStack, x, y); |
<<<<<<<
for(BlockEntity entity : mc.world.blockEntities){
if(entity instanceof BedBlockEntity && Utils.distance(entity.getPos().getX(), entity.getPos().getY(), entity.getPos().getZ(), mc.player.getX(), mc.player.getY(), mc.player.getZ()) <= breakRange.get()){
currentDamage = DamageCalcUtils.bedDamage(mc.player, Utils.vec3d(entity.getPos()));
if(currentDamage < maxDamage.get()
|| (mc.player.getHealth() + mc.player.getAbsorptionAmount() - currentDamage) < minHealth.get() || clickMode.get().equals(Mode.suicide)){
mc.player.setSneaking(false);
mc.interactionManager.interactBlock(mc.player, mc.world, Hand.MAIN_HAND, new BlockHitResult(mc.player.getPos(), Direction.UP, entity.getPos(), false));
}
=======
try {
for (BlockEntity entity : mc.world.blockEntities) {
if (entity instanceof BedBlockEntity && Math.sqrt(entity.getSquaredDistance(mc.player.getX(), mc.player.getY(), mc.player.getZ())) <= breakRange.get()) {
currentDamage = DamageCalcUtils.bedDamage(mc.player, new Vec3d(entity.getPos()));
if (currentDamage < maxDamage.get()
|| (mc.player.getHealth() + mc.player.getAbsorptionAmount() - currentDamage) < minHealth.get() || clickMode.get().equals(Mode.suicide)) {
mc.player.setSneaking(false);
mc.interactionManager.interactBlock(mc.player, mc.world, Hand.MAIN_HAND, new BlockHitResult(mc.player.getPos(), Direction.UP, entity.getPos(), false));
}
>>>>>>>
try {
for (BlockEntity entity : mc.world.blockEntities) {
if (entity instanceof BedBlockEntity && Utils.distance(entity.getPos().getX(), entity.getPos().getY(), entity.getPos().getZ(), mc.player.getX(), mc.player.getY(), mc.player.getZ()) <= breakRange.get()) {
currentDamage = DamageCalcUtils.bedDamage(mc.player, Utils.vec3d(entity.getPos()));
if (currentDamage < maxDamage.get()
|| (mc.player.getHealth() + mc.player.getAbsorptionAmount() - currentDamage) < minHealth.get() || clickMode.get().equals(Mode.suicide)) {
mc.player.setSneaking(false);
mc.interactionManager.interactBlock(mc.player, mc.world, Hand.MAIN_HAND, new BlockHitResult(mc.player.getPos(), Direction.UP, entity.getPos(), false));
} |
<<<<<<<
import net.minecraft.client.util.ChatMessages;
=======
>>>>>>>
import net.minecraft.client.util.ChatMessages;
<<<<<<<
@Shadow public abstract void addMessage(Text message);
@Shadow private boolean hasUnreadNewMessages;
@Inject(at = @At("HEAD"), method = "addMessage(Lnet/minecraft/text/StringRenderable;IIZ)V", cancellable = true)
private void onAddMessage(StringRenderable stringRenderable, int messageId, int timestamp, boolean bl, CallbackInfo info) {
info.cancel();
=======
@Inject(at = @At("HEAD"), method = "addMessage(Lnet/minecraft/text/Text;IIZ)V", cancellable = true)
private void onAddMessage(Text message, int messageId, int timestamp, boolean bl, CallbackInfo info) {
// Ignore players
for (String name : Ignore.ignoredPlayers) {
if (message.toString().contains("<" + name + ">")) {
info.cancel();
return;
}
}
>>>>>>>
@Inject(at = @At("HEAD"), method = "addMessage(Lnet/minecraft/text/StringRenderable;IIZ)V", cancellable = true)
private void onAddMessage(StringRenderable stringRenderable, int messageId, int timestamp, boolean bl, CallbackInfo info) {
// Ignore players
for (String name : Ignore.ignoredPlayers) {
if (stringRenderable.toString().contains("<" + name + ">")) {
info.cancel();
return;
}
}
<<<<<<<
// Normal things
if (messageId != 0) {
this.removeMessage(messageId);
}
int i = MathHelper.floor((double)this.getWidth() / this.getChatScale());
List<StringRenderable> list = ChatMessages.breakRenderedChatMessageLines(stringRenderable, i, this.client.textRenderer);
boolean bl2 = this.isChatFocused();
StringRenderable stringRenderable2;
for(Iterator var8 = list.iterator(); var8.hasNext(); this.visibleMessages.add(0, new ChatHudLine(timestamp, stringRenderable2, messageId))) {
stringRenderable2 = (StringRenderable)var8.next();
if (bl2 && this.scrolledLines > 0) {
this.hasUnreadNewMessages = true;
this.scroll(1.0D);
}
}
while(this.visibleMessages.size() > ModuleManager.INSTANCE.get(LongerChat.class).getMaxLineCount()) {
this.visibleMessages.remove(this.visibleMessages.size() - 1);
}
if (!bl) {
this.messages.add(0, new ChatHudLine(timestamp, stringRenderable, messageId));
while(this.messages.size() > ModuleManager.INSTANCE.get(LongerChat.class).getMaxLineCount()) {
this.messages.remove(this.messages.size() - 1);
}
}
=======
>>>>>>> |
<<<<<<<
for (int j = 9; j < 45; j++) {
if (mc.player.inventory.getStack(j).getItem() == stack.getItem()) {
=======
for (int j = 9; j < mc.player.inventory.main.size(); j++) {
if (mc.player.inventory.getInvStack(j).getItem() == stack.getItem()) {
>>>>>>>
for (int j = 9; j < mc.player.inventory.main.size(); j++) {
if (mc.player.inventory.getStack(j).getItem() == stack.getItem()) {
<<<<<<<
for (int i = 9; i < 45; i++) {
if (mc.player.inventory.getStack(i).getItem() == stack.getItem()) {
=======
for (int i = 9; i < mc.player.inventory.main.size(); i++) {
if (mc.player.inventory.getInvStack(i).getItem() == stack.getItem()) {
>>>>>>>
for (int i = 9; i < mc.player.inventory.main.size(); i++) {
if (mc.player.inventory.getStack(i).getItem() == stack.getItem()) { |
<<<<<<<
import minegame159.meteorclient.utils.Utils;
import net.minecraft.client.gui.screen.ingame.HandledScreen;
=======
import net.minecraft.client.gui.screen.ingame.ContainerScreen;
import net.minecraft.container.SlotActionType;
>>>>>>>
import net.minecraft.client.gui.screen.ingame.HandledScreen;
<<<<<<<
private Listener<TickEvent> onTick = new Listener<>(event -> {
if (mc.currentScreen instanceof HandledScreen<?>) return;
=======
private final Listener<TickEvent> onTick = new Listener<>(event -> {
if (mc.currentScreen instanceof ContainerScreen<?>) return;
>>>>>>>
private final Listener<TickEvent> onTick = new Listener<>(event -> {
if (mc.currentScreen instanceof HandledScreen<?>) return; |
<<<<<<<
import net.minecraft.client.util.math.MatrixStack;
=======
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
>>>>>>>
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.client.util.math.MatrixStack;
<<<<<<<
private void onBobViewWhenHurt(MatrixStack matrixStack, float f, CallbackInfo info) {
if (ModuleManager.INSTANCE.isActive(NoHurtCam.class)) info.cancel();
=======
private void onBobViewWhenHurt(float tickDelta, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noHurtCam()) info.cancel();
}
@Inject(method = "renderWeather", at = @At("HEAD"), cancellable = true)
private void onRenderWeather(float f, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "renderRain", at = @At("HEAD"), cancellable = true)
private void onRenderRain(CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "showFloatingItem", at = @At("HEAD"), cancellable = true)
private void onShowFloatingItem(ItemStack floatingItem, CallbackInfo info) {
if (floatingItem.getItem() == Items.TOTEM_OF_UNDYING && ModuleManager.INSTANCE.get(NoRender.class).noTotem()) {
info.cancel();
}
>>>>>>>
private void onBobViewWhenHurt(MatrixStack matrixStack, float f, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noHurtCam()) info.cancel();
}
@Inject(method = "renderWeather", at = @At("HEAD"), cancellable = true)
private void onRenderWeather(float f, CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "renderRain", at = @At("HEAD"), cancellable = true)
private void onRenderRain(CallbackInfo info) {
if (ModuleManager.INSTANCE.get(NoRender.class).noWeather()) info.cancel();
}
@Inject(method = "showFloatingItem", at = @At("HEAD"), cancellable = true)
private void onShowFloatingItem(ItemStack floatingItem, CallbackInfo info) {
if (floatingItem.getItem() == Items.TOTEM_OF_UNDYING && ModuleManager.INSTANCE.get(NoRender.class).noTotem()) {
info.cancel();
} |
<<<<<<<
if (MC.currentScreen == null) return;
GenericContainerScreenHandler container = ((GenericContainerScreen) MC.currentScreen).getScreenHandler();
=======
if (!(MC.currentScreen instanceof GenericContainerScreen)) return;
GenericContainer container = ((GenericContainerScreen) MC.currentScreen).getContainer();
>>>>>>>
if (!(MC.currentScreen instanceof GenericContainerScreen)) return;
GenericContainerScreenHandler container = ((GenericContainerScreen) MC.currentScreen).getScreenHandler(); |
<<<<<<<
mc.getItemRenderer().renderGuiItemIcon(itemStack, x, y);
if(!itemStack.isEmpty()) {
=======
mc.getItemRenderer().renderGuiItem(itemStack, x, y);
if(!itemStack.isEmpty() && itemStack.isDamageable()) {
>>>>>>>
mc.getItemRenderer().renderGuiItemIcon(itemStack, x, y);
if(!itemStack.isEmpty() && itemStack.isDamageable()) {
<<<<<<<
mc.getItemRenderer().renderGuiItemIcon(itemStack, x, y);
if(!itemStack.isEmpty()) {
String message = Integer.toString(Math.round(((itemStack.getMaxDamage() - itemStack.getDamage()) * 100) / itemStack.getMaxDamage()));
=======
mc.getItemRenderer().renderGuiItem(itemStack, x, y);
if(!itemStack.isEmpty() && itemStack.isDamageable()) {
String message = Integer.toString(Math.round(((itemStack.getMaxDamage() - itemStack.getDamage()) * 100f) / (float) itemStack.getMaxDamage()));
>>>>>>>
mc.getItemRenderer().renderGuiItemIcon(itemStack, x, y);
if(!itemStack.isEmpty() && itemStack.isDamageable()) {
String message = Integer.toString(Math.round(((itemStack.getMaxDamage() - itemStack.getDamage()) * 100f) / (float) itemStack.getMaxDamage())); |
<<<<<<<
public static final Events Events = com.backendless.Events.getInstance();
=======
public static final Commerce Commerce = com.backendless.Commerce.getInstance();
>>>>>>>
public static final Events Events = com.backendless.Events.getInstance();
public static final Commerce Commerce = com.backendless.Commerce.getInstance(); |
<<<<<<<
if (a instanceof AnnotatedConstructor) {
if (_java7Helper != null) {
Boolean b = _java7Helper.hasCreatorAnnotation(a);
if (b != null) {
return b.booleanValue();
=======
// 19-Apr-2016, tatu: As per [databind#1197], [databind#1122] (and some related),
// may or may not consider it a creator
if (_cfgConstructorPropertiesImpliesCreator ) {
if (a instanceof AnnotatedConstructor) {
if (_jdk7Helper != null) {
Boolean b = _jdk7Helper.hasCreatorAnnotation(a);
if (b != null) {
return b.booleanValue();
}
>>>>>>>
// 19-Apr-2016, tatu: As per [databind#1197], [databind#1122] (and some related),
// may or may not consider it a creator
if (_cfgConstructorPropertiesImpliesCreator ) {
if (a instanceof AnnotatedConstructor) {
if (_java7Helper != null) {
Boolean b = _java7Helper.hasCreatorAnnotation(a);
if (b != null) {
return b.booleanValue();
} |
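A minimal, hypothetical sketch of what the _cfgConstructorPropertiesImpliesCreator check above governs (the Point class is invented for illustration; assumes default mapper settings, where INFER_CREATOR_FROM_CONSTRUCTOR_PROPERTIES is enabled):
import java.beans.ConstructorProperties;
import com.fasterxml.jackson.databind.ObjectMapper;
public class CreatorPropertiesDemo {
    public static class Point {
        private final int x;
        private final int y;
        // Because of @ConstructorProperties, Jackson may treat this constructor
        // as a creator without an explicit @JsonCreator annotation.
        @ConstructorProperties({ "x", "y" })
        public Point(int x, int y) { this.x = x; this.y = y; }
        public int getX() { return x; }
        public int getY() { return y; }
    }
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Point p = mapper.readValue("{\"x\":1,\"y\":2}", Point.class);
        System.out.println(p.getX() + "," + p.getY()); // prints 1,2
    }
}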
<<<<<<<
=======
initialFill();
}
private void initialFill()
{
addHeader( HeadersManager.HeadersEnum.APP_ID_NAME, Backendless.getApplicationId() );
addHeader( HeadersManager.HeadersEnum.SECRET_KEY_NAME, Backendless.getSecretKey() );
>>>>>>>
initialFill();
}
private void initialFill()
{ |
<<<<<<<
String implName = ai.findImplicitPropertyName(_config, f);
=======
String implName = ai.findImplicitPropertyName(f);
>>>>>>>
String implName = ai.findImplicitPropertyName(_config, f);
<<<<<<<
/* 18-Aug-2011, tatu: As per existing unit tests, we should only
* use serialization annotation (@JsonSerialize) when serializing
* fields, and similarly for deserialize-only annotations... so
* no fallbacks in this particular case.
*/
pn = ai.findNameForSerialization(_config, f);
=======
// 18-Aug-2011, tatu: As per existing unit tests, we should only
// use serialization annotation (@JsonSerialize) when serializing
// fields, and similarly for deserialize-only annotations... so
// no fallbacks in this particular case.
pn = ai.findNameForSerialization(f);
>>>>>>>
// 18-Aug-2011, tatu: As per existing unit tests, we should only
// use serialization annotation (@JsonSerialize) when serializing
// fields, and similarly for deserialize-only annotations... so
// no fallbacks in this particular case.
pn = ai.findNameForSerialization(_config, f); |
<<<<<<<
public Map<String, Object> loadMetadata( final String pointId )
{
BackendlessGeoQuery query = null;
if( pointId.matches( "^[1-9]\\d*$" ) )
{
query = Backendless.Cache.get( Messaging.DEVICE_ID + "." + BackendlessGeoQuery.class.getSimpleName(), BackendlessGeoQuery.class );
}
return (Map<String, Object>) Invoker.invokeSync( GEO_MANAGER_SERVER_ALIAS, "loadMetadata", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), pointId, query } );
}
public void loadMetadata( final String pointId, final AsyncCallback<Map<String, Object>> responder )
{
BackendlessGeoQuery query = null;
if( pointId.matches( "^[1-9]\\d*$" ) )
{
query = Backendless.Cache.get( Messaging.DEVICE_ID + "." + BackendlessGeoQuery.class.getSimpleName(), BackendlessGeoQuery.class );
}
Invoker.invokeAsync( GEO_MANAGER_SERVER_ALIAS, "loadMetadata", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), pointId, query }, new AsyncCallback<Map<String, Object>>()
{
@Override
public void handleResponse( Map<String, Object> response )
{
if( responder != null )
responder.handleResponse( response );
}
@Override
public void handleFault( BackendlessFault fault )
{
if( responder != null )
responder.handleFault( fault );
}
} );
}
=======
private void setReferenceToCluster(BackendlessCollection<GeoPoint> collection){
for( GeoPoint geoPoint : collection.getData() )
{
if(geoPoint instanceof GeoCluster){
((GeoCluster)geoPoint).setBackendlessGeoQuery( (BackendlessGeoQuery) collection.getQuery() );
}
}
}
>>>>>>>
public Map<String, Object> loadMetadata( final String pointId )
{
BackendlessGeoQuery query = null;
if( pointId.matches( "^[1-9]\\d*$" ) )
{
query = Backendless.Cache.get( Messaging.DEVICE_ID + "." + BackendlessGeoQuery.class.getSimpleName(), BackendlessGeoQuery.class );
}
return (Map<String, Object>) Invoker.invokeSync( GEO_MANAGER_SERVER_ALIAS, "loadMetadata", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), pointId, query } );
}
public void loadMetadata( final String pointId, final AsyncCallback<Map<String, Object>> responder )
{
BackendlessGeoQuery query = null;
if( pointId.matches( "^[1-9]\\d*$" ) )
{
query = Backendless.Cache.get( Messaging.DEVICE_ID + "." + BackendlessGeoQuery.class.getSimpleName(), BackendlessGeoQuery.class );
}
Invoker.invokeAsync( GEO_MANAGER_SERVER_ALIAS, "loadMetadata", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), pointId, query }, new AsyncCallback<Map<String, Object>>()
{
@Override
public void handleResponse( Map<String, Object> response )
{
if( responder != null )
responder.handleResponse( response );
}
@Override
public void handleFault( BackendlessFault fault )
{
if( responder != null )
responder.handleFault( fault );
}
} );
}
private void setReferenceToCluster(BackendlessCollection<GeoPoint> collection){
for( GeoPoint geoPoint : collection.getData() )
{
if(geoPoint instanceof GeoCluster){
((GeoCluster)geoPoint).setBackendlessGeoQuery( (BackendlessGeoQuery) collection.getQuery() );
}
}
} |
<<<<<<<
=======
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
>>>>>>>
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
<<<<<<<
=======
import org.apache.commons.math.ode.DerivativeException;
>>>>>>>
<<<<<<<
private String seriesData;
private double period;
@Override
protected void initialize(Module module, String name, JsonObject definition) {
super.initialize(module, name, definition);
=======
private ThreadLocal<ExpressionProcessor> getExpProcessor() {
>>>>>>>
private String seriesData;
private double period;
private ThreadLocal<ExpressionProcessor> getExpProcessor() {
<<<<<<<
if (threadExpProcessor.get() != null) {
value = threadExpProcessor.get().evaluate(person, time);
} else if (seriesData != null) {
String[] items = seriesData.split(" ");
TimeSeriesData data = new TimeSeriesData(items.length, period);
for (int i = 0; i < items.length; i++) {
try {
data.addValue(Double.parseDouble(items[i]));
} catch (NumberFormatException nfe) {
throw new RuntimeException("unable to parse \"" + items[i]
+ "\" in SetAttribute state for \"" + attribute + "\"", nfe);
}
}
value = data;
}
=======
ThreadLocal<ExpressionProcessor> expProcessor = getExpProcessor();
if (expProcessor.get() != null) {
value = expProcessor.get().evaluate(person, time);
}
>>>>>>>
ThreadLocal<ExpressionProcessor> expProcessor = getExpProcessor();
if (expProcessor.get() != null) {
value = expProcessor.get().evaluate(person, time);
} else if (seriesData != null) {
String[] items = seriesData.split(" ");
TimeSeriesData data = new TimeSeriesData(items.length, period);
for (int i = 0; i < items.length; i++) {
try {
data.addValue(Double.parseDouble(items[i]));
} catch (NumberFormatException nfe) {
throw new RuntimeException("unable to parse \"" + items[i]
+ "\" in SetAttribute state for \"" + attribute + "\"", nfe);
}
}
value = data;
}
<<<<<<<
if (expression != null) {
threadExpProcessor.set(new ExpressionProcessor(expression));
}
// If there's an attachment, validate it before we process
if (attachment != null) {
attachment.validate();
=======
if (this.expression != null && threadExpProcessor.get() == null) {
threadExpProcessor.set(new ExpressionProcessor(this.expression));
>>>>>>>
if (expression != null && threadExpProcessor.get() == null) {
threadExpProcessor.set(new ExpressionProcessor(expression));
}
// If there's an attachment, validate it before we process
if (attachment != null) {
attachment.validate();
<<<<<<<
} else if (threadExpProcessor.get() != null) {
value = threadExpProcessor.get().evaluate(person, time);
} else if (sampledData != null) {
// Capture the data lists from person attributes
sampledData.setSeriesData(person);
value = new SampledData(sampledData);
} else if (attachment != null) {
attachment.process(person);
value = new Attachment(attachment);
}
=======
} else if (getExpProcessor().get() != null) {
value = getExpProcessor().get().evaluate(person, time);
}
>>>>>>>
} else if (threadExpProcessor != null
&& threadExpProcessor.get() != null) {
value = threadExpProcessor.get().evaluate(person, time);
} else if (sampledData != null) {
// Capture the data lists from person attributes
sampledData.setSeriesData(person);
value = new SampledData(sampledData);
} else if (attachment != null) {
attachment.process(person);
value = new Attachment(attachment);
} |
<<<<<<<
=======
/**
* Add an observation to the encounter. In this case, no codes are added to the observation.
* It appears that some code in Synthea likes it this way (and does not like good old OO-style
* encapsulation).
* @param time The time of the observation
* @param type The type of the observation
* @param value The observation value
* @return The newly created observation.
*/
public Observation addObservation(long time, String type, Object value) {
Observation observation = new Observation(time, type, value);
this.observations.add(observation);
return observation;
}
/**
* Add an observation to the encounter and uses the type to set the first code.
* @param time The time of the observation
* @param type The LOINC code for the observation
* @param value The observation value
* @param display The display text for the first code
* @return The newly created observation.
*/
public Observation addObservation(long time, String type, Object value, String display) {
Observation observation = new Observation(time, type, value);
this.observations.add(observation);
observation.codes.add(new Code("LOINC", type, display));
return observation;
}
/**
* Find the first observation in the encounter with the given LOINC code.
* @param code The LOINC code to look for
* @return A single observation or null
*/
public Observation findObservation(String code) {
return observations
.stream()
.filter(o -> o.type.equals(code))
.findFirst()
.orElse(null);
}
/**
* Find the encounter that happened before this one.
* @return The previous encounter or null if this is the first
*/
public Encounter previousEncounter() {
if (record.encounters.size() < 2) {
return null;
} else {
int index = record.encounters.indexOf(this);
if (index == 0) {
return null;
} else {
return record.encounters.get(index - 1);
}
}
}
}
>>>>>>>
<<<<<<<
/**
* Create a text summary of the health record containing counts of each time of entry.
* @return text summary.
*/
=======
public int providerCount() {
List<String> uuids = new ArrayList<String>();
for (Encounter enc : encounters) {
if (enc.provider != null) {
uuids.add(enc.provider.uuid);
}
}
Set<String> uniqueUuids = new HashSet<String>(uuids);
return uniqueUuids.size();
}
>>>>>>>
public int providerCount() {
List<String> uuids = new ArrayList<String>();
for (Encounter enc : encounters) {
if (enc.provider != null) {
uuids.add(enc.provider.uuid);
}
}
Set<String> uniqueUuids = new HashSet<String>(uuids);
return uniqueUuids.size();
}
/**
* Create a text summary of the health record containing counts of each type of entry.
* @return text summary.
*/ |
<<<<<<<
if (!person.alive(time)) {
return true;
}
=======
// If the payerHistory at the current age is null, they must get insurance for the new year.
// Note: This means the person will check to change insurance yearly, just after their
// birthday.
if (person.getPayerAtTime(time) == null) {
// Update their last payer with person's QOLS for that year.
if (person.getPreviousPayerAtTime(time) != null) {
person.getPreviousPayerAtTime(time).addQols(
person.getQolsForYear(Utilities.getYear(time) - 1));
}
>>>>>>>
if (!person.alive(time)) {
return true;
}
// If the payerHistory at the current age is null, they must get insurance for the new year.
// Note: This means the person will check to change insurance yearly, just after their
// birthday.
if (person.getPayerAtTime(time) == null) {
// Update their last payer with person's QOLS for that year.
if (person.getPreviousPayerAtTime(time) != null) {
person.getPreviousPayerAtTime(time).addQols(
person.getQolsForYear(Utilities.getYear(time) - 1));
} |
<<<<<<<
import java.util.stream.Collectors;
import org.hl7.fhir.r4.model.Address;
import org.hl7.fhir.r4.model.AllergyIntolerance;
=======
import org.hl7.fhir.r4.model.*;
>>>>>>>
import java.util.stream.Collectors;
import org.hl7.fhir.r4.model.Address;
import org.hl7.fhir.r4.model.AllergyIntolerance;
import org.hl7.fhir.r4.model.*;
<<<<<<<
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Identifier.IdentifierUse;
=======
import org.hl7.fhir.r4.model.Identifier.IdentifierUse;
>>>>>>>
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Identifier;
import org.hl7.fhir.r4.model.Identifier.IdentifierUse;
<<<<<<<
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.SimpleQuantity;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.SupplyDelivery;
import org.hl7.fhir.r4.model.SupplyDelivery.SupplyDeliveryStatus;
import org.hl7.fhir.r4.model.SupplyDelivery.SupplyDeliverySuppliedItemComponent;
import org.hl7.fhir.r4.model.Timing;
=======
>>>>>>>
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.SimpleQuantity;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.SupplyDelivery;
import org.hl7.fhir.r4.model.SupplyDelivery.SupplyDeliveryStatus;
import org.hl7.fhir.r4.model.SupplyDelivery.SupplyDeliverySuppliedItemComponent;
import org.hl7.fhir.r4.model.Timing;
<<<<<<<
if (USE_US_CORE_IG) {
// We do not yet account for mixed race
Extension raceExtension = new Extension(
"http://hl7.org/fhir/us/core/StructureDefinition/us-core-race");
String race = (String) person.attributes.get(Person.RACE);
String raceDisplay;
switch (race) {
case "white":
raceDisplay = "White";
break;
case "black":
raceDisplay = "Black or African American";
break;
case "asian":
raceDisplay = "Asian";
break;
case "native":
raceDisplay = "American Indian or Alaska Native";
break;
default: // Other (Put Hawaiian and Pacific Islander here for now)
raceDisplay = "Other";
break;
}
=======
if (person.attributes.get(Person.IDENTIFIER_RECORD_ID) != null) {
Code siteCode = new Code("http://codi.mitre.org", (String) person.attributes.get(Person.IDENTIFIER_SITE), "Synthetic Denver List ID");
patientResource.addIdentifier()
.setType(mapCodeToCodeableConcept(siteCode, "http://codi.mitre.org"))
.setSystem("http://codi.mitre.org")
.setValue((String) person.attributes.get(Person.IDENTIFIER_RECORD_ID));
}
if (person.attributes.get(Person.LINK_ID) != null) {
patientResource.addIdentifier()
.setSystem("http://codi.mitre.org/link_id")
.setValue(person.attributes.get(Person.LINK_ID).toString());
}
if (person.attributes.get(Person.CONTACT_EMAIL) != null) {
ContactComponent contact = new ContactComponent();
HumanName contactName = new HumanName();
contactName.setUse(HumanName.NameUse.OFFICIAL);
contactName.addGiven((String) person.attributes.get(Person.CONTACT_GIVEN_NAME));
contactName.setFamily((String) person.attributes.get(Person.CONTACT_FAMILY_NAME));
contact.setName(contactName);
contact.addTelecom().setSystem(ContactPointSystem.EMAIL)
.setUse(ContactPointUse.HOME)
.setValue((String) person.attributes.get(Person.CONTACT_EMAIL));
patientResource.addContact(contact);
}
// We do not yet account for mixed race
Extension raceExtension = new Extension(
"http://hl7.org/fhir/us/core/StructureDefinition/us-core-race");
String race = (String) person.attributes.get(Person.RACE);
String raceDisplay;
switch (race) {
case "white":
raceDisplay = "White";
break;
case "black":
raceDisplay = "Black or African American";
break;
case "asian":
raceDisplay = "Asian";
break;
case "native":
raceDisplay = "American Indian or Alaska Native";
break;
default: // Other (Put Hawaiian and Pacific Islander here for now)
raceDisplay = "Other";
break;
}
String raceNum = (String) raceEthnicityCodes.get(race);
Extension raceCodingExtension = new Extension("ombCategory");
Coding raceCoding = new Coding();
if (raceDisplay.equals("Other")) {
raceCoding.setSystem("http://terminology.hl7.org/CodeSystem/v3-NullFlavor");
raceCoding.setCode("UNK");
raceCoding.setDisplay("Unknown");
} else {
raceCoding.setSystem("urn:oid:2.16.840.1.113883.6.238");
raceCoding.setCode(raceNum);
raceCoding.setDisplay(raceDisplay);
}
raceCodingExtension.setValue(raceCoding);
raceExtension.addExtension(raceCodingExtension);
Extension raceTextExtension = new Extension("text");
raceTextExtension.setValue(new StringType(raceDisplay));
raceExtension.addExtension(raceTextExtension);
patientResource.addExtension(raceExtension);
// We do not yet account for mixed ethnicity
Extension ethnicityExtension = new Extension(
"http://hl7.org/fhir/us/core/StructureDefinition/us-core-ethnicity");
String ethnicity = (String) person.attributes.get(Person.ETHNICITY);
String ethnicityDisplay;
if (ethnicity.equals("hispanic")) {
ethnicity = "hispanic";
ethnicityDisplay = "Hispanic or Latino";
} else {
ethnicity = "nonhispanic";
ethnicityDisplay = "Not Hispanic or Latino";
}
String ethnicityNum = (String) raceEthnicityCodes.get(ethnicity);
>>>>>>>
if (person.attributes.get(Person.IDENTIFIER_RECORD_ID) != null) {
Code siteCode = new Code("http://codi.mitre.org", (String) person.attributes.get(Person.IDENTIFIER_SITE), "Synthetic Denver List ID");
patientResource.addIdentifier()
.setType(mapCodeToCodeableConcept(siteCode, "http://codi.mitre.org"))
.setSystem("http://codi.mitre.org")
.setValue((String) person.attributes.get(Person.IDENTIFIER_RECORD_ID));
}
if (person.attributes.get(Person.LINK_ID) != null) {
patientResource.addIdentifier()
.setSystem("http://codi.mitre.org/link_id")
.setValue(person.attributes.get(Person.LINK_ID).toString());
}
if (person.attributes.get(Person.CONTACT_EMAIL) != null) {
ContactComponent contact = new ContactComponent();
HumanName contactName = new HumanName();
contactName.setUse(HumanName.NameUse.OFFICIAL);
contactName.addGiven((String) person.attributes.get(Person.CONTACT_GIVEN_NAME));
contactName.setFamily((String) person.attributes.get(Person.CONTACT_FAMILY_NAME));
contact.setName(contactName);
contact.addTelecom().setSystem(ContactPointSystem.EMAIL)
.setUse(ContactPointUse.HOME)
.setValue((String) person.attributes.get(Person.CONTACT_EMAIL));
patientResource.addContact(contact);
}
if (USE_US_CORE_IG) {
// We do not yet account for mixed race
Extension raceExtension = new Extension(
"http://hl7.org/fhir/us/core/StructureDefinition/us-core-race");
String race = (String) person.attributes.get(Person.RACE);
String raceDisplay;
switch (race) {
case "white":
raceDisplay = "White";
break;
case "black":
raceDisplay = "Black or African American";
break;
case "asian":
raceDisplay = "Asian";
break;
case "native":
raceDisplay = "American Indian or Alaska Native";
break;
default: // Other (Put Hawaiian and Pacific Islander here for now)
raceDisplay = "Other";
break;
} |
<<<<<<<
/**
* Copy-constructor that is useful for sub-classes that just want to
* copy all super-class properties without modifications.
*/
protected BeanSerializerBase(BeanSerializerBase src) {
this(src, src._props, src._filteredProps);
}
public BeanSerializerBase(BeanSerializerBase src,
=======
protected BeanSerializerBase(BeanSerializerBase src,
>>>>>>>
/**
* Copy-constructor that is useful for sub-classes that just want to
* copy all super-class properties without modifications.
*/
protected BeanSerializerBase(BeanSerializerBase src) {
this(src, src._props, src._filteredProps);
}
protected BeanSerializerBase(BeanSerializerBase src,
<<<<<<<
=======
/**
* Mutant factory used for creating a new instance with modified set
* of properties
*
* @since 2.11.1
*/
protected abstract BeanSerializerBase withProperties(BeanPropertyWriter[] properties,
BeanPropertyWriter[] filteredProperties);
>>>>>>>
/**
* Mutant factory used for creating a new instance with modified set
* of properties
*/
protected abstract BeanSerializerBase withProperties(BeanPropertyWriter[] properties,
BeanPropertyWriter[] filteredProperties);
<<<<<<<
objectIdInfo = intr.findObjectReferenceInfo(config, accessor, objectIdInfo);
ObjectIdGenerator<?> gen;
=======
objectIdInfo = intr.findObjectReferenceInfo(accessor, objectIdInfo);
>>>>>>>
objectIdInfo = intr.findObjectReferenceInfo(config, accessor, objectIdInfo);
<<<<<<<
gen = ctxt.objectIdGeneratorInstance(accessor, objectIdInfo);
=======
ObjectIdGenerator<?> gen = provider.objectIdGeneratorInstance(accessor, objectIdInfo);
>>>>>>>
ObjectIdGenerator<?>gen = ctxt.objectIdGeneratorInstance(accessor, objectIdInfo); |
<<<<<<<
@Test
public void testAppWithModuleFilter() throws Exception {
TestHelper.exportOff();
Config.set("test_key", "pre-test value");
String[] args = {"-s", "0", "-p", "0", "-m", "copd" + pathSeparator + "allerg*"};
final PrintStream original = System.out;
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final PrintStream print = new PrintStream(out, true);
System.setOut(print);
App.main(args);
out.flush();
String output = out.toString();
Assert.assertTrue(output.contains("Running with options:"));
Assert.assertTrue(output.contains("Seed:"));
Assert.assertTrue(output.contains("Modules:"));
Assert.assertTrue(output.contains("COPD Module"));
Assert.assertTrue(output.contains("Allergic"));
Assert.assertTrue(output.contains("Allergies"));
Assert.assertFalse(output.contains("asthma"));
System.setOut(original);
}
=======
@Test
public void testAppWithOverflow() throws Exception {
TestHelper.exportOff();
String[] args = {"-s", "1", "-p", "3", "-o", "false"};
final PrintStream original = System.out;
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final PrintStream print = new PrintStream(out, true);
System.setOut(print);
App.main(args);
out.flush();
String output = out.toString();
Assert.assertTrue(output.contains("Running with options:"));
Assert.assertTrue(output.contains("Seed:"));
String regex = "\\{alive=(\\d+), dead=(\\d+)\\}";
Matcher matches = Pattern.compile(regex).matcher(output);
Assert.assertTrue(matches.find());
int alive = Integer.parseInt(matches.group(1));
int dead = Integer.parseInt(matches.group(2));
Assert.assertEquals(alive + dead, 3);
System.setOut(original);
}
>>>>>>>
@Test
public void testAppWithOverflow() throws Exception {
TestHelper.exportOff();
String[] args = {"-s", "1", "-p", "3", "-o", "false"};
final PrintStream original = System.out;
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final PrintStream print = new PrintStream(out, true);
System.setOut(print);
App.main(args);
out.flush();
String output = out.toString();
Assert.assertTrue(output.contains("Running with options:"));
Assert.assertTrue(output.contains("Seed:"));
String regex = "\\{alive=(\\d+), dead=(\\d+)\\}";
Matcher matches = Pattern.compile(regex).matcher(output);
Assert.assertTrue(matches.find());
int alive = Integer.parseInt(matches.group(1));
int dead = Integer.parseInt(matches.group(2));
Assert.assertEquals(alive + dead, 3);
System.setOut(original);
}
public void testAppWithModuleFilter() throws Exception {
TestHelper.exportOff();
Config.set("test_key", "pre-test value");
String[] args = {"-s", "0", "-p", "0", "-m", "copd" + pathSeparator + "allerg*"};
final PrintStream original = System.out;
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final PrintStream print = new PrintStream(out, true);
System.setOut(print);
App.main(args);
out.flush();
String output = out.toString();
Assert.assertTrue(output.contains("Running with options:"));
Assert.assertTrue(output.contains("Seed:"));
Assert.assertTrue(output.contains("Modules:"));
Assert.assertTrue(output.contains("COPD Module"));
Assert.assertTrue(output.contains("Allergic"));
Assert.assertTrue(output.contains("Allergies"));
Assert.assertFalse(output.contains("asthma"));
System.setOut(original);
} |
<<<<<<<
=======
import java.util.List;
>>>>>>> |
<<<<<<<
import java.util.ArrayList;
=======
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
>>>>>>>
import java.util.ArrayList;
import java.util.HashMap;
<<<<<<<
import org.mitre.synthea.modules.QualityOfLifeModule;
import org.mitre.synthea.modules.WeightLossModule;
=======
import org.mitre.synthea.world.agents.Payer;
>>>>>>>
import org.mitre.synthea.modules.QualityOfLifeModule;
import org.mitre.synthea.modules.WeightLossModule;
import org.mitre.synthea.world.agents.Payer;
<<<<<<<
time = System.currentTimeMillis();
=======
>>>>>>>
time = System.currentTimeMillis();
<<<<<<<
=======
person.events.create(birthTime, Event.BIRTH, "Generator.run", true);
Payer.loadPayers(new Location("Massachusetts", null));
person.setPayerAtTime(time, Payer.noInsurance);
>>>>>>>
Payer.loadPayers(new Location("Massachusetts", null));
person.setPayerAtTime(time, Payer.noInsurance); |
<<<<<<<
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
=======
>>>>>>>
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
<<<<<<<
import org.mitre.synthea.helpers.Config;
=======
import org.mitre.synthea.TestHelper;
>>>>>>>
import org.mitre.synthea.helpers.Config;
import org.mitre.synthea.TestHelper; |
<<<<<<<
long encounter_end = encounter.stop > 0 ? encounter.stop : encounter.stop + TimeUnit.MINUTES.toMillis(15);
=======
long encounter_end = encounter.stop > 0 ? encounter.stop : encounter.start + TimeUnit.MINUTES.toMillis(15);
>>>>>>>
long encounter_end = encounter.stop > 0 ? encounter.stop : encounter.start + TimeUnit.MINUTES.toMillis(15);
<<<<<<<
=======
>>>>>>>
<<<<<<<
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent().addEncounter(new Reference(encounterEntry.getFullUrl())));
=======
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(1)).addEncounter(new Reference(encounterEntry.getFullUrl())));
>>>>>>>
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(1)).addEncounter(new Reference(encounterEntry.getFullUrl())));
<<<<<<<
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent().addEncounter(new Reference(encounterEntry.getFullUrl())));
=======
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(1)).addEncounter(new Reference(encounterEntry.getFullUrl())));
int itemSequence = 2;
>>>>>>>
claimResource.addItem(new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(1)).addEncounter(new Reference(encounterEntry.getFullUrl())));
int itemSequence = 2;
<<<<<<<
org.hl7.fhir.dstu3.model.Claim.ItemComponent diagnosisItem = new org.hl7.fhir.dstu3.model.Claim.ItemComponent();
diagnosisItem.addDiagnosisLinkId(conditionSequence);
=======
org.hl7.fhir.dstu3.model.Claim.ItemComponent diagnosisItem = new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(itemSequence));
diagnosisItem.addDiagnosisLinkId(conditionSequence);
>>>>>>>
org.hl7.fhir.dstu3.model.Claim.ItemComponent diagnosisItem = new org.hl7.fhir.dstu3.model.Claim.ItemComponent(new PositiveIntType(itemSequence));
diagnosisItem.addDiagnosisLinkId(conditionSequence);
<<<<<<<
=======
procedureResource.setStatus(ProcedureStatus.COMPLETED);
>>>>>>>
procedureResource.setStatus(ProcedureStatus.COMPLETED);
<<<<<<<
=======
careplanResource.setIntent(CarePlanIntent.ORDER);
>>>>>>>
careplanResource.setIntent(CarePlanIntent.ORDER); |
<<<<<<<
=======
boolean isRHNeg = person.rand() < 0.15;
attributes.put("RH_NEG", isRHNeg);
>>>>>>>
boolean isRHNeg = person.rand() < 0.15;
attributes.put("RH_NEG", isRHNeg); |
<<<<<<<
public abstract class Exporter
{
/**
* Export a single patient, into all the formats supported.
* (Formats may be enabled or disabled by configuration)
*
* @param person Patient to export
* @param stopTime Time at which the simulation stopped
*/
public static void export(Person person, long stopTime)
{
// TODO: filter for export
if (Boolean.parseBoolean(Config.get("exporter.fhir.export")))
{
String bundleJson = FhirStu3.convertToFHIR(person, stopTime);
File outDirectory = getOutputFolder("fhir", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "json"));
try
{
Files.write(outFilePath, Collections.singleton(bundleJson), StandardOpenOption.CREATE_NEW);
} catch (IOException e)
{
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.ccda.export")))
{
String ccdaXml = CCDAExporter.export(person, stopTime);
File outDirectory = getOutputFolder("ccda", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "xml"));
try
{
Files.write(outFilePath, Collections.singleton(ccdaXml), StandardOpenOption.CREATE_NEW);
} catch (IOException e)
{
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.csv.export")))
{
try
{
CSVExporter.getInstance().export(person, stopTime);
} catch (IOException e)
{
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.text.export")))
{
try
{
TextExporter.export(person, stopTime);
} catch (IOException e)
{
e.printStackTrace();
}
}
}
/**
* Run any exporters that require the full dataset to be generated prior to exporting.
* (Ex, an aggregate statistical exporter)
*
* @param generator Generator that generated the patients
*/
public static void runPostCompletionExports(Generator generator)
{
try{
HospitalExporter.export(generator.stop);
} catch (Exception e) {
e.printStackTrace();
}
if (Boolean.parseBoolean(Config.get("exporter.cost_access_outcomes_report")))
{
ReportExporter.export(generator);
}
if (Boolean.parseBoolean(Config.get("exporter.prevalence_report")))
{
try{
PrevalenceReport.export(generator);
} catch (Exception e) {
System.err.println("Prevalence report generation failed!");
e.printStackTrace();
}
}
}
public static File getOutputFolder(String folderName, Person person)
{
List<String> folders = new ArrayList<>();
folders.add(folderName);
if (person != null && Boolean.parseBoolean(Config.get("exporter.subfolders_by_id_substring")))
{
String id = (String)person.attributes.get(Person.ID);
folders.add(id.substring(0, 2));
folders.add(id.substring(0, 3));
}
String baseDirectory = Config.get("exporter.baseDirectory");
File f = Paths.get(baseDirectory, folders.toArray(new String[0])).toFile();
f.mkdirs();
return f;
}
public static String filename(Person person, String extension)
{
if (Boolean.parseBoolean(Config.get("exporter.use_uuid_filenames")))
{
return person.attributes.get(Person.ID) + "." + extension;
} else
{
// ensure unique filenames for now
return person.attributes.get(Person.NAME) + "_" + person.attributes.get(Person.ID) + "." + extension;
}
}
=======
public abstract class Exporter {
/**
* Export a single patient, into all the formats supported. (Formats may be enabled or disabled by
* configuration)
*
* @param person Patient to export
* @param stopTime Time at which the simulation stopped
*/
public static void export(Person person, long stopTime) {
// TODO: filter for export
if (Boolean.parseBoolean(Config.get("exporter.fhir.export"))) {
String bundleJson = FhirStu3.convertToFHIR(person, stopTime);
File outDirectory = getOutputFolder("fhir", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "json"));
try {
Files.write(outFilePath, Collections.singleton(bundleJson), StandardOpenOption.CREATE_NEW);
} catch (IOException e) {
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.ccda.export"))) {
String ccdaXml = CCDAExporter.export(person, stopTime);
File outDirectory = getOutputFolder("ccda", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "xml"));
try {
Files.write(outFilePath, Collections.singleton(ccdaXml), StandardOpenOption.CREATE_NEW);
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Run any exporters that require the full dataset to be generated prior to exporting. (Ex, an
* aggregate statistical exporter)
*
* @param generator
* Generator that generated the patients
*/
public static void runPostCompletionExports(Generator generator) {
try {
HospitalExporter.export(generator.stop);
} catch (Exception e) {
e.printStackTrace();
}
if (Boolean.parseBoolean(Config.get("exporter.cost_access_outcomes_report"))) {
ReportExporter.export(generator);
}
if (Boolean.parseBoolean(Config.get("exporter.prevalence_report"))) {
try {
PrevalenceReport.export(generator);
} catch (Exception e) {
System.err.println("Prevalence report generation failed!");
e.printStackTrace();
}
}
}
public static File getOutputFolder(String folderName, Person person) {
List<String> folders = new ArrayList<>();
folders.add(folderName);
if (person != null && Boolean.parseBoolean(Config.get("exporter.subfolders_by_id_substring"))) {
String id = (String) person.attributes.get(Person.ID);
folders.add(id.substring(0, 2));
folders.add(id.substring(0, 3));
}
String baseDirectory = Config.get("exporter.baseDirectory");
File f = Paths.get(baseDirectory, folders.toArray(new String[0])).toFile();
f.mkdirs();
return f;
}
public static String filename(Person person, String extension) {
if (Boolean.parseBoolean(Config.get("exporter.use_uuid_filenames"))) {
return person.attributes.get(Person.ID) + "." + extension;
} else {
// ensure unique filenames for now
return person.attributes.get(Person.NAME) + "_" + person.attributes.get(Person.ID) + "."
+ extension;
}
}
>>>>>>>
public abstract class Exporter {
/**
* Export a single patient, into all the formats supported. (Formats may be enabled or disabled by
* configuration)
*
* @param person Patient to export
* @param stopTime Time at which the simulation stopped
*/
public static void export(Person person, long stopTime) {
// TODO: filter for export
if (Boolean.parseBoolean(Config.get("exporter.fhir.export"))) {
String bundleJson = FhirStu3.convertToFHIR(person, stopTime);
File outDirectory = getOutputFolder("fhir", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "json"));
try {
Files.write(outFilePath, Collections.singleton(bundleJson), StandardOpenOption.CREATE_NEW);
} catch (IOException e) {
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.ccda.export"))) {
String ccdaXml = CCDAExporter.export(person, stopTime);
File outDirectory = getOutputFolder("ccda", person);
Path outFilePath = outDirectory.toPath().resolve(filename(person, "xml"));
try {
Files.write(outFilePath, Collections.singleton(ccdaXml), StandardOpenOption.CREATE_NEW);
} catch (IOException e) {
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.csv.export"))) {
try {
CSVExporter.getInstance().export(person, stopTime);
} catch (IOException e) {
e.printStackTrace();
}
}
if (Boolean.parseBoolean(Config.get("exporter.text.export"))) {
try {
TextExporter.export(person, stopTime);
} catch (IOException e) {
e.printStackTrace();
}
}
}
/**
* Run any exporters that require the full dataset to be generated prior to exporting. (Ex, an
* aggregate statistical exporter)
*
* @param generator
* Generator that generated the patients
*/
public static void runPostCompletionExports(Generator generator) {
try {
HospitalExporter.export(generator.stop);
} catch (Exception e) {
e.printStackTrace();
}
if (Boolean.parseBoolean(Config.get("exporter.cost_access_outcomes_report"))) {
ReportExporter.export(generator);
}
if (Boolean.parseBoolean(Config.get("exporter.prevalence_report"))) {
try {
PrevalenceReport.export(generator);
} catch (Exception e) {
System.err.println("Prevalence report generation failed!");
e.printStackTrace();
}
}
}
public static File getOutputFolder(String folderName, Person person) {
List<String> folders = new ArrayList<>();
folders.add(folderName);
if (person != null && Boolean.parseBoolean(Config.get("exporter.subfolders_by_id_substring"))) {
String id = (String) person.attributes.get(Person.ID);
folders.add(id.substring(0, 2));
folders.add(id.substring(0, 3));
}
String baseDirectory = Config.get("exporter.baseDirectory");
File f = Paths.get(baseDirectory, folders.toArray(new String[0])).toFile();
f.mkdirs();
return f;
}
public static String filename(Person person, String extension) {
if (Boolean.parseBoolean(Config.get("exporter.use_uuid_filenames"))) {
return person.attributes.get(Person.ID) + "." + extension;
} else {
// ensure unique filenames for now
return person.attributes.get(Person.NAME) + "_" + person.attributes.get(Person.ID) + "."
+ extension;
}
} |
<<<<<<<
Provider.loadProviders(location);
// Initialize Payers
if (Boolean.parseBoolean(Config.get("generate.health_insurance", "false"))) {
Payer.loadPayers(location);
} else {
Payer.loadNoInsurance();
}
=======
Provider.loadProviders(location, options.clinicianSeed);
>>>>>>>
Provider.loadProviders(location, options.clinicianSeed);
// Initialize Payers
if (Boolean.parseBoolean(Config.get("generate.health_insurance", "false"))) {
Payer.loadPayers(location);
} else {
Payer.loadNoInsurance();
} |
<<<<<<<
=======
import com.fasterxml.jackson.databind.type.LogicalType;
import java.math.BigDecimal;
import java.math.BigInteger;
>>>>>>>
<<<<<<<
=======
*
* @since 2.8
>>>>>>>
<<<<<<<
@Override
public ValueInstantiator createContextual(DeserializationContext ctxt,
BeanDescription beanDesc)
throws JsonMappingException
{
return this;
}
=======
>>>>>>>
@Override
public ValueInstantiator createContextual(DeserializationContext ctxt,
BeanDescription beanDesc)
throws JsonMappingException
{
return this;
} |
<<<<<<<
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.hl7.fhir.r4.model.Address;
import org.hl7.fhir.r4.model.AllergyIntolerance;
=======
import java.text.SimpleDateFormat;
import java.util.*;
import org.apache.sis.geometry.DirectPosition2D;
import org.hl7.fhir.r4.model.*;
>>>>>>>
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.hl7.fhir.r4.model.Address;
import org.hl7.fhir.r4.model.AllergyIntolerance;
<<<<<<<
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.Device.DeviceNameType;
import org.hl7.fhir.r4.model.Device.FHIRDeviceStatus;
import org.hl7.fhir.r4.model.DiagnosticReport;
=======
>>>>>>>
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.DateType;
import org.hl7.fhir.r4.model.DecimalType;
import org.hl7.fhir.r4.model.Device;
import org.hl7.fhir.r4.model.Device.DeviceNameType;
import org.hl7.fhir.r4.model.Device.FHIRDeviceStatus;
import org.hl7.fhir.r4.model.DiagnosticReport;
<<<<<<<
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.DocumentReference.DocumentReferenceContextComponent;
import org.hl7.fhir.r4.model.Dosage;
=======
>>>>>>>
import org.hl7.fhir.r4.model.DocumentReference;
import org.hl7.fhir.r4.model.DocumentReference.DocumentReferenceContextComponent;
import org.hl7.fhir.r4.model.Dosage;
<<<<<<<
import org.hl7.fhir.r4.model.Enumerations.DocumentReferenceStatus;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
=======
>>>>>>>
import org.hl7.fhir.r4.model.Enumerations.DocumentReferenceStatus;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
<<<<<<<
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Identifier.IdentifierUse;
import org.hl7.fhir.r4.model.Identifier;
=======
>>>>>>>
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Identifier.IdentifierUse;
import org.hl7.fhir.r4.model.Identifier;
<<<<<<<
import org.hl7.fhir.r4.model.ImagingStudy.ImagingStudyStatus;
import org.hl7.fhir.r4.model.Immunization;
=======
>>>>>>>
import org.hl7.fhir.r4.model.ImagingStudy.ImagingStudyStatus;
import org.hl7.fhir.r4.model.Immunization;
<<<<<<<
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Location.LocationPositionComponent;
import org.hl7.fhir.r4.model.Location.LocationStatus;
import org.hl7.fhir.r4.model.Medication.MedicationStatus;
import org.hl7.fhir.r4.model.MedicationAdministration;
import org.hl7.fhir.r4.model.MedicationAdministration.MedicationAdministrationDosageComponent;
import org.hl7.fhir.r4.model.MedicationRequest;
=======
>>>>>>>
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Location.LocationPositionComponent;
import org.hl7.fhir.r4.model.Location.LocationStatus;
import org.hl7.fhir.r4.model.Medication.MedicationStatus;
import org.hl7.fhir.r4.model.MedicationAdministration;
import org.hl7.fhir.r4.model.MedicationAdministration.MedicationAdministrationDosageComponent;
import org.hl7.fhir.r4.model.MedicationRequest;
<<<<<<<
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Patient.ContactComponent;
=======
>>>>>>>
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Patient.ContactComponent;
<<<<<<<
import org.hl7.fhir.r4.model.Period;
import org.hl7.fhir.r4.model.PositiveIntType;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.PractitionerRole;
=======
>>>>>>>
import org.hl7.fhir.r4.model.Period;
import org.hl7.fhir.r4.model.PositiveIntType;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.PractitionerRole;
<<<<<<<
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Provenance.ProvenanceAgentComponent;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.SimpleQuantity;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Timing;
=======
>>>>>>>
import org.hl7.fhir.r4.model.Provenance;
import org.hl7.fhir.r4.model.Provenance.ProvenanceAgentComponent;
import org.hl7.fhir.r4.model.Quantity;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.ServiceRequest;
import org.hl7.fhir.r4.model.SimpleQuantity;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Timing;
<<<<<<<
if (USE_US_CORE_IG) {
// Add Provenance to the Bundle
provenance(bundle, person, stopTime);
}
=======
navigationalAssistance(person, personEntry, bundle);
wic(person, personEntry, bundle);
>>>>>>>
navigationalAssistance(person, personEntry, bundle);
wic(person, personEntry, bundle); |
<<<<<<<
// Track if we renewed meds at this encounter. Used in State.java encounter state.
public boolean chronicMedsRenewed;
=======
public String clinicalNote;
>>>>>>>
// Track if we renewed meds at this encounter. Used in State.java encounter state.
public boolean chronicMedsRenewed;
public String clinicalNote; |