Dataset columns:

lang: stringclasses, 1 value
license: stringclasses, 13 values
stderr: stringlengths, 0–350
commit: stringlengths, 40–40
returncode: int64, 0–128
repos: stringlengths, 7–45.1k
new_contents: stringlengths, 0–1.87M
new_file: stringlengths, 6–292
old_contents: stringlengths, 0–1.87M
message: stringlengths, 6–9.26k
old_file: stringlengths, 6–292
subject: stringlengths, 0–4.45k
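The columns above can be loaded and inspected with the `datasets` library. The following is a minimal sketch, not the dataset's documented loading procedure; the dataset id "your-org/commit-changes" and the "train" split are placeholders standing in for the actual identifiers, which are not given in this preview.

```python
# Minimal sketch (assumptions: `datasets` is installed; the dataset id and split
# name below are placeholders, not the real identifiers of this dataset).
from datasets import load_dataset

ds = load_dataset("your-org/commit-changes", split="train")

row = ds[0]
# All columns are strings except `returncode`, which is an int64.
print(row["lang"], row["license"], row["commit"], row["returncode"])
print(row["new_file"])        # path of the changed file after the commit
print(row["message"][:200])   # commit message, truncated for display
```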
Example row 1:

lang: Java
license: apache-2.0
commit: a95e408220593c505c6e79dba83e02fdbc3d1e8e
returncode: 0
repos: SSEHUB/EASyProducer,SSEHUB/EASyProducer,SSEHUB/EASyProducer

new_contents:
/* * Copyright 2009-2016 University of Hildesheim, Software Systems Engineering * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.ssehub.easy.producer.scenario_tests; import java.io.File; import java.io.IOException; import java.text.Collator; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Set; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import net.ssehub.easy.instantiation.core.model.buildlangModel.IBuildlangElement; import net.ssehub.easy.instantiation.core.model.buildlangModel.IEnumeratingLoop; import net.ssehub.easy.instantiation.core.model.buildlangModel.Rule; import net.ssehub.easy.instantiation.core.model.buildlangModel.Script; import net.ssehub.easy.instantiation.core.model.common.RuntimeEnvironment; import net.ssehub.easy.instantiation.core.model.common.VariableDeclaration; import net.ssehub.easy.instantiation.core.model.common.VilException; import net.ssehub.easy.instantiation.core.model.execution.IInstantiatorTracer; import net.ssehub.easy.instantiation.core.model.execution.TracerFactory; import net.ssehub.easy.instantiation.core.model.expressions.AbstractTracerBase; import net.ssehub.easy.instantiation.core.model.expressions.CallExpression.CallType; import net.ssehub.easy.instantiation.core.model.expressions.Expression; import net.ssehub.easy.instantiation.core.model.templateModel.Def; import net.ssehub.easy.instantiation.core.model.templateModel.ITemplateLangElement; import net.ssehub.easy.instantiation.core.model.templateModel.ITracer; import net.ssehub.easy.instantiation.core.model.templateModel.Template; import net.ssehub.easy.instantiation.core.model.tracing.ConsoleTracerFactory; import net.ssehub.easy.instantiation.core.model.vilTypes.Collection; import net.ssehub.easy.instantiation.core.model.vilTypes.FieldDescriptor; import net.ssehub.easy.instantiation.core.model.vilTypes.OperationDescriptor; import net.ssehub.easy.varModel.varModel.testSupport.TextTestUtils; /** * Experiment tests that currently can only be executed locally. * * @author Holger Eichelberger */ public class LocalExperimentsTests extends ExperimentsTests { /** * Starts up the test. */ @BeforeClass public static void startUp() { ExperimentsTests.startUp(); } /** * Tears down the test. */ @AfterClass public static void shutDown() { ExperimentsTests.shutDown(); } /** * Traces VTL execution times. * * @author Holger Eichelberger */ private static class VtlExecutionTimeTracer extends AbstractTracerBase implements net.ssehub.easy.instantiation.core.model.buildlangModel.ITracer, net.ssehub.easy.instantiation.core.model.templateModel.ITracer, IInstantiatorTracer { private HashMap<String, Long> start = new HashMap<String, Long>(); private HashMap<String, Long> results = new HashMap<String, Long>(); private int vtlNestingLevel = 0; /** * Returns the names of the measured scripts. * * @return the names */ private Set<String> scriptNames() { return results.keySet(); } /** * Returns the execution time of the given script. 
* * @param scriptName the name of the script * @return the execution time or <b>null</b> if unknown */ private Long getExecutionTime(String scriptName) { return null == scriptName ? null : results.get(scriptName); } @Override public void visitDef(Def def, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedDef(Def def, RuntimeEnvironment<?, ?> environment, Object result) { } @Override public void trace(String text) { } @Override public void traceWarning(String text) { } @Override public void valueDefined(VariableDeclaration var, FieldDescriptor field, Object value) { } @Override public void traceExecutionException(VilException exception) { } @Override public void enable(boolean enable) { } @Override public void visitingCallExpression(OperationDescriptor descriptor, CallType callType, Object[] args) { } @Override public void visitedCallExpression(OperationDescriptor descriptor, CallType callType, Object[] args, Object result) { } @Override public void failedAt(Expression expression) { } @Override public void traceMessage(String message) { } @Override public void traceError(String message) { } @Override public void visitTemplate(Template template) { if (0 == vtlNestingLevel) { start.put(template.getName(), System.currentTimeMillis()); } vtlNestingLevel++; } @Override public void visitedTemplate(Template template) { vtlNestingLevel--; if (0 == vtlNestingLevel) { long endTime = System.currentTimeMillis(); Long startTime = start.remove(template.getName()); if (null != startTime) { results.put(template.getName(), endTime - startTime); } } } @Override public void visitedSwitch(Object select, int alternative, Object value) { } @Override public void visitLoop(VariableDeclaration var) { } @Override public void visitedLoop(VariableDeclaration var) { } @Override public void failedAt(ITemplateLangElement element) { } @Override public void visitFlush() { } @Override public void visitedFlush() { } @Override public void visitScript(Script script, RuntimeEnvironment<?, ?> environment) { } @Override public void visitScriptBody(Script script, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedScript(Script script) { } @Override public void visitRule(Rule rule, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedRule(Rule rule, RuntimeEnvironment<?, ?> environment, Object result) { } @Override public void visitLoop(IEnumeratingLoop loop, RuntimeEnvironment<?, ?> environment) { } @Override public void visitIteratorAssignment(IEnumeratingLoop loop, net.ssehub.easy.instantiation.core.model.buildlangModel.VariableDeclaration var, Object value) { } @Override public void visitedLoop(IEnumeratingLoop loop, RuntimeEnvironment<?, ?> environment) { } @Override public Collection<?> adjustSequenceForMap(Collection<?> collection) { return null; } @Override public Collection<Object> adjustSequenceForJoin(Collection<Object> collection) { return null; } @Override public void visitSystemCall(String[] args) { } @Override public void visitingInstantiator(String name) { } @Override public void visitedInstantiator(String name, Object result) { } @Override public void visitAlternative(boolean takeIf) { } @Override public void failedAt(IBuildlangElement element) { } @Override public void reset() { } @Override public void visitWhileBody() { } @Override public void visitedWhileBody() { } } /** * A tracer factory for time tracing. 
* * @author Holger Eichelberger */ private static class TimeTracerFactory extends TracerFactory { private VtlExecutionTimeTracer tracer; /** * Constructs a new tracer factory and sets this instance * as the default factory. */ private TimeTracerFactory() { tracer = new VtlExecutionTimeTracer(); } @Override public ITracer createTemplateLanguageTracerImpl() { return tracer; } @Override public net.ssehub.easy.instantiation.core.model.buildlangModel.ITracer createBuildLanguageTracerImpl() { return tracer; } @Override public IInstantiatorTracer createInstantiatorTracerImpl() { return tracer; } /** * Returns the names of the measured scripts. * * @return the names */ private Set<String> scriptNames() { return tracer.scriptNames(); } /** * Returns the execution time of the given script. * * @param scriptName the name of the script * @return the execution time or <b>null</b> if unknown */ private Long getExecutionTime(String scriptName) { return tracer.getExecutionTime(scriptName); } /** * Prints the execution time results as a simple table. */ private void printResults() { ArrayList<String> names = new ArrayList<String>(); names.addAll(scriptNames()); Collections.sort(names, Collator.getInstance()); for (String name : names) { Long time = getExecutionTime(name); if (null != time) { System.out.println(name + "\t\t" + time); } } } } /** * Test modified QM XML templates. * * @throws IOException shall not occur */ @Test public void testXml() throws IOException { TimeTracerFactory factory = new TimeTracerFactory(); TracerFactory.setInstance(factory); //TracerFactory.setInstance(new ConsoleTracerFactory(true)); // no reasoning: we just need specific structures/values File tmp = executeCase("xmlExperiments", null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "GenTopoHardwareCorrelationFinancialPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg3.xml"), expected); expected = new File(tmp, "InterfacesPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "ifPom.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom3.xml"), expected); expected = new File(tmp, "pipelinePom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "pip.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip3.xml"), expected); expected = new File(tmp, "pipelines.xml"); TextTestUtils.assertFileEquality(new File(tmp, "pips.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips3.xml"), expected); expected = new File(tmp, "ModelGenPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom3.xml"), expected); expected = new File(tmp, "pipSettings.xml"); 
TextTestUtils.assertFileEquality(new File(tmp, "set.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set3.xml"), expected); factory.printResults(); } } /** * Test modified QM Java templates. * * @throws IOException shall not occur */ @Test public void testJava() throws IOException { // no reasoning: we just need specific structures/values File tmp = executeCase("javaExperiments", null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "FCorrelationFinancial.java"); TextTestUtils.assertFileEquality(new File(tmp, "fcFam.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFam0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFam1.java"), expected); expected = new File(tmp, "IFCorrelationFinancial.java"); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI1.java"), expected); expected = new File(tmp, "IPriorityDataSink.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnk.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnk0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnk1.java"), expected); expected = new File(tmp, "PriorityDataSink.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI1.java"), expected); expected = new File(tmp, "PriorityDataSinkProfiling.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP1.java"), expected); expected = new File(tmp, "ISpringFinancialData.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrc.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrc0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrc1.java"), expected); expected = new File(tmp, "SpringFinancialData.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI1.java"), expected); expected = new File(tmp, "SpringFinancialDataProfiling.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP1.java"), expected); expected = new File(tmp, "SpringFinancialDataProfilingImpl.java"); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI1.java"), expected); expected = new File(tmp, "SerializationRegistry.java"); TextTestUtils.assertFileEquality(new File(tmp, "serReg.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "serReg0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "serReg1.java"), expected); expected = new File(tmp, "FCorrelationFinancialSerializers.java"); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor.java"), 
expected); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor1.java"), expected); expected = new File(tmp, "GenTopoHardwareFinancialCorrReceiver.cpp"); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver0.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver1.cpp"), expected); expected = new File(tmp, "GenTopoHardwareFinancialCorrSender.cpp"); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender0.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender1.cpp"), expected); } } /** * For developing modified Java templates. * * @throws IOException shall not occur */ //@Ignore("just for development/debugging") @Test public void testJavaDebug() throws IOException { TracerFactory.setInstance(new ConsoleTracerFactory(true)); String[] names = new String[3]; names[0] = "javaExperiments"; names[1] = "javaExperiments"; names[2] = "javaExperimentsDebug"; // no reasoning: we just need specific structures/values File tmp = executeCase(names, null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "PriorityPip_DataManagementElement0DataManagementElement.java"); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME1.java"), expected); } } }
new_file: Plugins/EASy-Producer/ScenariosTest/src/net/ssehub/easy/producer/scenario_tests/LocalExperimentsTests.java

old_contents:
/* * Copyright 2009-2016 University of Hildesheim, Software Systems Engineering * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.ssehub.easy.producer.scenario_tests; import java.io.File; import java.io.IOException; import java.text.Collator; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Set; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import net.ssehub.easy.instantiation.core.model.buildlangModel.IBuildlangElement; import net.ssehub.easy.instantiation.core.model.buildlangModel.IEnumeratingLoop; import net.ssehub.easy.instantiation.core.model.buildlangModel.Rule; import net.ssehub.easy.instantiation.core.model.buildlangModel.Script; import net.ssehub.easy.instantiation.core.model.common.RuntimeEnvironment; import net.ssehub.easy.instantiation.core.model.common.VariableDeclaration; import net.ssehub.easy.instantiation.core.model.common.VilException; import net.ssehub.easy.instantiation.core.model.execution.IInstantiatorTracer; import net.ssehub.easy.instantiation.core.model.execution.TracerFactory; import net.ssehub.easy.instantiation.core.model.expressions.AbstractTracerBase; import net.ssehub.easy.instantiation.core.model.expressions.CallExpression.CallType; import net.ssehub.easy.instantiation.core.model.expressions.Expression; import net.ssehub.easy.instantiation.core.model.templateModel.Def; import net.ssehub.easy.instantiation.core.model.templateModel.ITemplateLangElement; import net.ssehub.easy.instantiation.core.model.templateModel.ITracer; import net.ssehub.easy.instantiation.core.model.templateModel.Template; import net.ssehub.easy.instantiation.core.model.tracing.ConsoleTracerFactory; import net.ssehub.easy.instantiation.core.model.vilTypes.Collection; import net.ssehub.easy.instantiation.core.model.vilTypes.FieldDescriptor; import net.ssehub.easy.instantiation.core.model.vilTypes.OperationDescriptor; import net.ssehub.easy.varModel.varModel.testSupport.TextTestUtils; /** * Experiment tests that currently can only be executed locally. * * @author Holger Eichelberger */ public class LocalExperimentsTests extends ExperimentsTests { /** * Starts up the test. */ @BeforeClass public static void startUp() { ExperimentsTests.startUp(); } /** * Tears down the test. */ @AfterClass public static void shutDown() { ExperimentsTests.shutDown(); } /** * Traces VTL execution times. * * @author Holger Eichelberger */ private static class VtlExecutionTimeTracer extends AbstractTracerBase implements net.ssehub.easy.instantiation.core.model.buildlangModel.ITracer, net.ssehub.easy.instantiation.core.model.templateModel.ITracer, IInstantiatorTracer { private HashMap<String, Long> start = new HashMap<String, Long>(); private HashMap<String, Long> results = new HashMap<String, Long>(); private int vtlNestingLevel = 0; /** * Returns the names of the measured scripts. * * @return the names */ private Set<String> scriptNames() { return results.keySet(); } /** * Returns the execution time of the given script. 
* * @param scriptName the name of the script * @return the execution time or <b>null</b> if unknown */ private Long getExecutionTime(String scriptName) { return null == scriptName ? null : results.get(scriptName); } @Override public void visitDef(Def def, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedDef(Def def, RuntimeEnvironment<?, ?> environment, Object result) { } @Override public void trace(String text) { } @Override public void valueDefined(VariableDeclaration var, FieldDescriptor field, Object value) { } @Override public void traceExecutionException(VilException exception) { } @Override public void enable(boolean enable) { } @Override public void visitingCallExpression(OperationDescriptor descriptor, CallType callType, Object[] args) { } @Override public void visitedCallExpression(OperationDescriptor descriptor, CallType callType, Object[] args, Object result) { } @Override public void failedAt(Expression expression) { } @Override public void traceMessage(String message) { } @Override public void traceError(String message) { } @Override public void visitTemplate(Template template) { if (0 == vtlNestingLevel) { start.put(template.getName(), System.currentTimeMillis()); } vtlNestingLevel++; } @Override public void visitedTemplate(Template template) { vtlNestingLevel--; if (0 == vtlNestingLevel) { long endTime = System.currentTimeMillis(); Long startTime = start.remove(template.getName()); if (null != startTime) { results.put(template.getName(), endTime - startTime); } } } @Override public void visitedSwitch(Object select, int alternative, Object value) { } @Override public void visitLoop(VariableDeclaration var) { } @Override public void visitedLoop(VariableDeclaration var) { } @Override public void failedAt(ITemplateLangElement element) { } @Override public void visitFlush() { } @Override public void visitedFlush() { } @Override public void visitScript(Script script, RuntimeEnvironment<?, ?> environment) { } @Override public void visitScriptBody(Script script, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedScript(Script script) { } @Override public void visitRule(Rule rule, RuntimeEnvironment<?, ?> environment) { } @Override public void visitedRule(Rule rule, RuntimeEnvironment<?, ?> environment, Object result) { } @Override public void visitLoop(IEnumeratingLoop loop, RuntimeEnvironment<?, ?> environment) { } @Override public void visitIteratorAssignment(IEnumeratingLoop loop, net.ssehub.easy.instantiation.core.model.buildlangModel.VariableDeclaration var, Object value) { } @Override public void visitedLoop(IEnumeratingLoop loop, RuntimeEnvironment<?, ?> environment) { } @Override public Collection<?> adjustSequenceForMap(Collection<?> collection) { return null; } @Override public Collection<Object> adjustSequenceForJoin(Collection<Object> collection) { return null; } @Override public void visitSystemCall(String[] args) { } @Override public void visitingInstantiator(String name) { } @Override public void visitedInstantiator(String name, Object result) { } @Override public void visitAlternative(boolean takeIf) { } @Override public void failedAt(IBuildlangElement element) { } @Override public void reset() { } @Override public void visitWhileBody() { } @Override public void visitedWhileBody() { } } /** * A tracer factory for time tracing. 
* * @author Holger Eichelberger */ private static class TimeTracerFactory extends TracerFactory { private VtlExecutionTimeTracer tracer; /** * Constructs a new tracer factory and sets this instance * as the default factory. */ private TimeTracerFactory() { tracer = new VtlExecutionTimeTracer(); } @Override public ITracer createTemplateLanguageTracerImpl() { return tracer; } @Override public net.ssehub.easy.instantiation.core.model.buildlangModel.ITracer createBuildLanguageTracerImpl() { return tracer; } @Override public IInstantiatorTracer createInstantiatorTracerImpl() { return tracer; } /** * Returns the names of the measured scripts. * * @return the names */ private Set<String> scriptNames() { return tracer.scriptNames(); } /** * Returns the execution time of the given script. * * @param scriptName the name of the script * @return the execution time or <b>null</b> if unknown */ private Long getExecutionTime(String scriptName) { return tracer.getExecutionTime(scriptName); } /** * Prints the execution time results as a simple table. */ private void printResults() { ArrayList<String> names = new ArrayList<String>(); names.addAll(scriptNames()); Collections.sort(names, Collator.getInstance()); for (String name : names) { Long time = getExecutionTime(name); if (null != time) { System.out.println(name + "\t\t" + time); } } } } /** * Test modified QM XML templates. * * @throws IOException shall not occur */ @Test public void testXml() throws IOException { TimeTracerFactory factory = new TimeTracerFactory(); TracerFactory.setInstance(factory); //TracerFactory.setInstance(new ConsoleTracerFactory(true)); // no reasoning: we just need specific structures/values File tmp = executeCase("xmlExperiments", null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "GenTopoHardwareCorrelationFinancialPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "hwAlg3.xml"), expected); expected = new File(tmp, "InterfacesPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "ifPom.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "ifPom3.xml"), expected); expected = new File(tmp, "pipelinePom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "pip.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pip3.xml"), expected); expected = new File(tmp, "pipelines.xml"); TextTestUtils.assertFileEquality(new File(tmp, "pips.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "pips3.xml"), expected); expected = new File(tmp, "ModelGenPom.xml"); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "modelGenPom3.xml"), expected); expected = new File(tmp, "pipSettings.xml"); 
TextTestUtils.assertFileEquality(new File(tmp, "set.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set1.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set2.xml"), expected); TextTestUtils.assertFileEquality(new File(tmp, "set3.xml"), expected); factory.printResults(); } } /** * Test modified QM Java templates. * * @throws IOException shall not occur */ @Test public void testJava() throws IOException { // no reasoning: we just need specific structures/values File tmp = executeCase("javaExperiments", null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "FCorrelationFinancial.java"); TextTestUtils.assertFileEquality(new File(tmp, "fcFam.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFam0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFam1.java"), expected); expected = new File(tmp, "IFCorrelationFinancial.java"); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "fcFamI1.java"), expected); expected = new File(tmp, "IPriorityDataSink.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnk.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnk0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnk1.java"), expected); expected = new File(tmp, "PriorityDataSink.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkI1.java"), expected); expected = new File(tmp, "PriorityDataSinkProfiling.java"); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "psSnkIP1.java"), expected); expected = new File(tmp, "ISpringFinancialData.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrc.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrc0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrc1.java"), expected); expected = new File(tmp, "SpringFinancialData.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcI1.java"), expected); expected = new File(tmp, "SpringFinancialDataProfiling.java"); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spSrcIP1.java"), expected); expected = new File(tmp, "SpringFinancialDataProfilingImpl.java"); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "spProfSrcI1.java"), expected); expected = new File(tmp, "SerializationRegistry.java"); TextTestUtils.assertFileEquality(new File(tmp, "serReg.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "serReg0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "serReg1.java"), expected); expected = new File(tmp, "FCorrelationFinancialSerializers.java"); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor.java"), 
expected); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "tSerFCor1.java"), expected); expected = new File(tmp, "GenTopoHardwareFinancialCorrReceiver.cpp"); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver0.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorReceiver1.cpp"), expected); expected = new File(tmp, "GenTopoHardwareFinancialCorrSender.cpp"); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender0.cpp"), expected); TextTestUtils.assertFileEquality(new File(tmp, "FCorSender1.cpp"), expected); } } /** * For developing modified Java templates. * * @throws IOException shall not occur */ //@Ignore("just for development/debugging") @Test public void testJavaDebug() throws IOException { TracerFactory.setInstance(new ConsoleTracerFactory(true)); String[] names = new String[3]; names[0] = "javaExperiments"; names[1] = "javaExperiments"; names[2] = "javaExperimentsDebug"; // no reasoning: we just need specific structures/values File tmp = executeCase(names, null, "", null, Mode.INSTANTIATE); if (null != tmp) { File expected = new File(tmp, "PriorityPip_DataManagementElement0DataManagementElement.java"); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME0.java"), expected); TextTestUtils.assertFileEquality(new File(tmp, "PrioPip_DME1.java"), expected); } } }
message: subsequent change, tracer warnings
old_file: Plugins/EASy-Producer/ScenariosTest/src/net/ssehub/easy/producer/scenario_tests/LocalExperimentsTests.java
subject: subsequent change, tracer warnings
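Each record pairs the pre-commit and post-commit file contents (old_contents, new_contents) with their paths (old_file, new_file) and the commit message, so a unified diff can be rebuilt from a single record with Python's standard difflib. The sketch below assumes `row` is one record dict keyed by the column names in the schema above.

```python
# Sketch: rebuild a unified diff from one record's old/new file contents.
# `row` is assumed to be a single record dict with the schema's column names.
import difflib

def record_to_diff(row: dict) -> str:
    old_lines = row["old_contents"].splitlines(keepends=True)
    new_lines = row["new_contents"].splitlines(keepends=True)
    return "".join(difflib.unified_diff(
        old_lines, new_lines,
        fromfile=row["old_file"],
        tofile=row["new_file"],
    ))

# For the EASyProducer record above, the resulting diff shows the added
# traceWarning(String) override, matching the "tracer warnings" commit message.
```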
Example row 2:

lang: Java
license: apache-2.0
commit: 2cbeb8ff0c8af2afb461aff8693fe9ed1bec8d24
returncode: 0
repos: apache/jackrabbit,apache/jackrabbit,apache/jackrabbit

new_contents:
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.jcr2spi; import org.apache.jackrabbit.jcr2spi.state.PropertyState; import org.apache.jackrabbit.jcr2spi.operation.SetPropertyValue; import org.apache.jackrabbit.jcr2spi.operation.Operation; import org.apache.jackrabbit.name.NoPrefixDeclaredException; import org.apache.jackrabbit.name.QName; import org.apache.jackrabbit.name.NameFormat; import org.apache.jackrabbit.value.QValue; import org.apache.jackrabbit.value.ValueFormat; import org.apache.jackrabbit.value.ValueHelper; import org.slf4j.LoggerFactory; import org.slf4j.Logger; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.PropertyDefinition; import javax.jcr.lock.LockException; import javax.jcr.version.VersionException; import javax.jcr.Property; import javax.jcr.Item; import javax.jcr.RepositoryException; import javax.jcr.Node; import javax.jcr.AccessDeniedException; import javax.jcr.ItemNotFoundException; import javax.jcr.ItemVisitor; import javax.jcr.Value; import javax.jcr.ValueFormatException; import javax.jcr.PropertyType; import java.io.InputStream; import java.util.Calendar; /** * <code>PropertyImpl</code>... */ public class PropertyImpl extends ItemImpl implements Property { private static Logger log = LoggerFactory.getLogger(PropertyImpl.class); public static final int UNDEFINED_PROPERTY_LENGTH = -1; private final PropertyDefinition definition; public PropertyImpl(ItemManagerImpl itemManager, SessionImpl session, PropertyState state, PropertyDefinition definition, ItemLifeCycleListener[] listeners) { super(itemManager, session, state, listeners); this.definition = definition; // value will be read on demand } //-----------------------------------------------------< Item interface >--- /** * @see Item#getName() */ public String getName() throws RepositoryException { checkStatus(); QName name = getQName(); try { return NameFormat.format(name, session.getNamespaceResolver()); } catch (NoPrefixDeclaredException npde) { // should never get here... String msg = "Internal error: encountered unregistered namespace " + name.getNamespaceURI(); log.debug(msg); throw new RepositoryException(msg, npde); } } /** * @see Item#getParent() */ public Node getParent() throws ItemNotFoundException, AccessDeniedException, RepositoryException { checkStatus(); return (Node) itemMgr.getItem(getItemState().getParent()); } /** * Implementation of {@link Item#accept(javax.jcr.ItemVisitor)} for property. 
* * @param visitor * @see Item#accept(javax.jcr.ItemVisitor) */ public void accept(ItemVisitor visitor) throws RepositoryException { checkStatus(); visitor.visit(this); } /** * Returns false * * @return false * @see javax.jcr.Item#isNode() */ public boolean isNode() { return false; } //-------------------------------------------------< Property interface >--- /** * @see Property#setValue(javax.jcr.Value) */ public void setValue(Value value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int valueType = (value != null) ? value.getType() : PropertyType.UNDEFINED; int reqType = getRequiredType(valueType); setValue(value, reqType); } /** * @see Property#setValue(javax.jcr.Value[]) */ public void setValue(Value[] values) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(true); // assert equal types for all values entries int valueType = PropertyType.UNDEFINED; if (values != null) { for (int i = 0; i < values.length; i++) { if (values[i] == null) { // skip null values as those will be purged later continue; } if (valueType == PropertyType.UNDEFINED) { valueType = values[i].getType(); } else if (valueType != values[i].getType()) { String msg = "Inhomogeneous type of values (" + safeGetJCRPath() + ")"; log.debug(msg); throw new ValueFormatException(msg); } } } int targetType = definition.getRequiredType(); if (targetType == PropertyType.UNDEFINED) { targetType = (valueType == PropertyType.UNDEFINED) ? PropertyType.STRING : valueType; } // convert to internal values of correct type QValue[] qValues = null; if (values != null) { Value[] vs = ValueHelper.convert(values, targetType, session.getValueFactory()); qValues = ValueFormat.getQValues(vs, session.getNamespaceResolver()); } setInternalValues(qValues, targetType); } /** * @see Property#setValue(String) */ public void setValue(String value) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.STRING); if (value == null) { setInternalValues(null, reqType); } else { setValue(session.getValueFactory().createValue(value), reqType); } } /** * @see Property#setValue(String[]) */ public void setValue(String[] values) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(true); int reqType = getRequiredType(PropertyType.STRING); QValue[] qValues = null; // convert to internal values of correct type if (values != null) { qValues = new QValue[values.length]; for (int i = 0; i < values.length; i++) { String string = values[i]; QValue qValue = null; if (string != null) { if (reqType != PropertyType.STRING) { // type conversion required Value v = ValueHelper.convert(string, reqType, session.getValueFactory()); qValue = ValueFormat.getQValue(v, session.getNamespaceResolver()); } else { // no type conversion required qValue = QValue.create(string); } } qValues[i] = qValue; } } setInternalValues(qValues, reqType); } /** * @see Property#setValue(InputStream) */ public void setValue(InputStream value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.BINARY); if (value == null) { setInternalValues(null, reqType); } else { setValue(session.getValueFactory().createValue(value), reqType); } } /** * @see Property#setValue(long) */ public void setValue(long value) throws 
ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.LONG); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(double) */ public void setValue(double value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.DOUBLE); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(Calendar) */ public void setValue(Calendar value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.DATE); if (value == null) { setInternalValues(null, reqType); } else { setValue(session.getValueFactory().createValue(value), reqType); } } /** * @see Property#setValue(boolean) */ public void setValue(boolean value) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.BOOLEAN); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(Node) */ public void setValue(Node value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.REFERENCE); if (value == null) { setInternalValues(null, reqType); } else { if (reqType == PropertyType.REFERENCE) { if (value instanceof NodeImpl) { NodeImpl targetNode = (NodeImpl)value; if (targetNode.isNodeType(QName.MIX_REFERENCEABLE)) { QValue qValue = QValue.create(targetNode.getUUID(), PropertyType.REFERENCE); setInternalValues(new QValue[]{qValue}, reqType); } else { throw new ValueFormatException("Target node must be of node type mix:referenceable"); } } else { String msg = "Incompatible Node object: " + value + "(" + safeGetJCRPath() + ")"; log.debug(msg); throw new RepositoryException(msg); } } else { throw new ValueFormatException("Property must be of type REFERENCE (" + safeGetJCRPath() + ")"); } } } /** * @see Property#getValue() */ public Value getValue() throws ValueFormatException, RepositoryException { QValue value = getQValue(); return ValueFormat.getJCRValue(value, session.getNamespaceResolver(), session.getValueFactory()); } /** * @see Property#getValues() */ public Value[] getValues() throws ValueFormatException, RepositoryException { QValue[] qValues = getQValues(); Value[] values = new Value[qValues.length]; for (int i = 0; i < qValues.length; i++) { values[i] = ValueFormat.getJCRValue(qValues[i], session.getNamespaceResolver(), session.getValueFactory()); } return values; } /** * @see Property#getString() */ public String getString() throws ValueFormatException, RepositoryException { return getValue().getString(); } /** * @see Property#getStream() */ public InputStream getStream() throws ValueFormatException, RepositoryException { return getValue().getStream(); } /** * @see Property#getLong() */ public long getLong() throws ValueFormatException, RepositoryException { return getValue().getLong(); } /** * @see Property#getDouble() */ public double getDouble() throws ValueFormatException, RepositoryException { return getValue().getDouble(); } /** * @see Property#getDate() */ public Calendar getDate() throws ValueFormatException, RepositoryException { return getValue().getDate(); } /** * @see Property#getBoolean() */ public boolean getBoolean() 
throws ValueFormatException, RepositoryException { return getValue().getBoolean(); } /** * @see Property#getNode() */ public Node getNode() throws ValueFormatException, RepositoryException { QValue value = getQValue(); if (value.getType() == PropertyType.REFERENCE) { return session.getNodeByUUID(value.getString()); } else { throw new ValueFormatException("Property must be of type REFERENCE (" + safeGetJCRPath() + ")"); } } /** * @see Property#getLength */ public long getLength() throws ValueFormatException, RepositoryException { return getLength(getQValue()); } /** * @see Property#getLengths */ public long[] getLengths() throws ValueFormatException, RepositoryException { QValue[] values = getQValues(); long[] lengths = new long[values.length]; for (int i = 0; i < values.length; i++) { lengths[i] = getLength(values[i]); } return lengths; } /** * * @param value * @return * @throws RepositoryException */ private long getLength(QValue value) throws RepositoryException { long length = UNDEFINED_PROPERTY_LENGTH; switch (value.getType()) { case PropertyType.STRING: case PropertyType.BINARY: case PropertyType.LONG: case PropertyType.DOUBLE: length = value.getLength(); break; case PropertyType.NAME: Value jcrValue = ValueFormat.getJCRValue(value, session.getNamespaceResolver(), session.getValueFactory()); length = jcrValue.getString().length(); break; } return length; } /** * @see javax.jcr.Property#getDefinition() */ public PropertyDefinition getDefinition() throws RepositoryException { checkStatus(); return definition; } /** * @see javax.jcr.Property#getType() */ public int getType() throws RepositoryException { checkStatus(); return getPropertyState().getType(); } //-----------------------------------------------------------< ItemImpl >--- /** * Returns the QName defined with this <code>PropertyState</code> * * @return * @see PropertyState#getQName() * @see ItemImpl#getQName() */ QName getQName() { return getPropertyState().getQName(); } //------------------------------------------------------< check methods >--- /** * * @param multiValues * @throws RepositoryException */ private void checkIsWritable(boolean multiValues) throws RepositoryException { // check common to properties and nodes checkIsWritable(); // property specific check if (definition.isMultiple() != multiValues) { throw new ValueFormatException(getPath() + "Multivalue definition of " + safeGetJCRPath() + " does not match to given value(s)."); } } //---------------------------------------------< private implementation >--- /** * * @return true if the definition indicates that this Property is multivalued. */ private boolean isMultiple() { return definition.isMultiple(); } /** * * @param defaultType * @return the required type for this property. 
*/ private int getRequiredType(int defaultType) { // check type according to definition of this property int reqType = definition.getRequiredType(); if (reqType == PropertyType.UNDEFINED) { if (defaultType == PropertyType.UNDEFINED) { reqType = PropertyType.STRING; } else { reqType = defaultType; } } return reqType; } /** * * @return * @throws ValueFormatException * @throws RepositoryException */ private QValue getQValue() throws ValueFormatException, RepositoryException { checkStatus(); if (isMultiple()) { throw new ValueFormatException(safeGetJCRPath() + " is multi-valued and can therefore only be retrieved as an array of values"); } // avoid unnecessary object creation if possible return getPropertyState().getValue(); } /** * * @return * @throws ValueFormatException * @throws RepositoryException */ private QValue[] getQValues() throws ValueFormatException, RepositoryException { checkStatus(); if (!isMultiple()) { throw new ValueFormatException(safeGetJCRPath() + " is not multi-valued and can therefore only be retrieved as single value"); } // avoid unnecessary object creation if possible return getPropertyState().getValues(); } /** * * @param value * @param requiredType * @throws RepositoryException */ private void setValue(Value value, int requiredType) throws RepositoryException { if (requiredType == PropertyType.UNDEFINED) { // should never get here since calling methods assert valid type throw new IllegalArgumentException("Property type of a value cannot be undefined (" + safeGetJCRPath() + ")."); } if (value == null) { setInternalValues(null, requiredType); return; } QValue qValue; if (requiredType != value.getType()) { // type conversion required Value v = ValueHelper.convert(value, requiredType, session.getValueFactory()); qValue = ValueFormat.getQValue(v, session.getNamespaceResolver()); } else { // no type conversion required qValue = ValueFormat.getQValue(value, session.getNamespaceResolver()); } setInternalValues(new QValue[]{qValue}, requiredType); } /** * * @param qValues * @param valueType * @throws ConstraintViolationException * @throws RepositoryException */ private void setInternalValues(QValue[] qValues, int valueType) throws ConstraintViolationException, RepositoryException { // check for null value if (qValues == null) { // setting a property to null removes it automatically remove(); return; } // modify the state of this property Operation op = SetPropertyValue.create(getPropertyState(), qValues, valueType); session.getSessionItemStateManager().execute(op); } /** * Private helper to access the <code>PropertyState</code> directly * * @return state for this Property */ private PropertyState getPropertyState() { return (PropertyState) getItemState(); } }
new_file: contrib/spi/jcr2spi/src/main/java/org/apache/jackrabbit/jcr2spi/PropertyImpl.java

old_contents:
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.jcr2spi; import org.apache.jackrabbit.jcr2spi.state.PropertyState; import org.apache.jackrabbit.jcr2spi.operation.SetPropertyValue; import org.apache.jackrabbit.jcr2spi.operation.Operation; import org.apache.jackrabbit.name.NoPrefixDeclaredException; import org.apache.jackrabbit.name.QName; import org.apache.jackrabbit.name.NameFormat; import org.apache.jackrabbit.value.QValue; import org.apache.jackrabbit.value.ValueFormat; import org.apache.jackrabbit.value.ValueHelper; import org.slf4j.LoggerFactory; import org.slf4j.Logger; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.PropertyDefinition; import javax.jcr.lock.LockException; import javax.jcr.version.VersionException; import javax.jcr.Property; import javax.jcr.Item; import javax.jcr.RepositoryException; import javax.jcr.Node; import javax.jcr.AccessDeniedException; import javax.jcr.ItemNotFoundException; import javax.jcr.ItemVisitor; import javax.jcr.Value; import javax.jcr.ValueFormatException; import javax.jcr.PropertyType; import java.io.InputStream; import java.util.Calendar; /** * <code>PropertyImpl</code>... */ public class PropertyImpl extends ItemImpl implements Property { private static Logger log = LoggerFactory.getLogger(PropertyImpl.class); public static final int UNDEFINED_PROPERTY_LENGTH = -1; private final PropertyDefinition definition; public PropertyImpl(ItemManagerImpl itemManager, SessionImpl session, PropertyState state, PropertyDefinition definition, ItemLifeCycleListener[] listeners) { super(itemManager, session, state, listeners); this.definition = definition; // value will be read on demand } //-----------------------------------------------------< Item interface >--- /** * @see Item#getName() */ public String getName() throws RepositoryException { checkStatus(); QName name = getQName(); try { return NameFormat.format(name, session.getNamespaceResolver()); } catch (NoPrefixDeclaredException npde) { // should never get here... String msg = "Internal error: encountered unregistered namespace " + name.getNamespaceURI(); log.debug(msg); throw new RepositoryException(msg, npde); } } /** * @see Item#getParent() */ public Node getParent() throws ItemNotFoundException, AccessDeniedException, RepositoryException { checkStatus(); return (Node) itemMgr.getItem(getItemState().getParent()); } /** * Implementation of {@link Item#accept(javax.jcr.ItemVisitor)} for property. 
* * @param visitor * @see Item#accept(javax.jcr.ItemVisitor) */ public void accept(ItemVisitor visitor) throws RepositoryException { checkStatus(); visitor.visit(this); } /** * Returns false * * @return false * @see javax.jcr.Item#isNode() */ public boolean isNode() { return false; } //-------------------------------------------------< Property interface >--- /** * @see Property#setValue(javax.jcr.Value) */ public void setValue(Value value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int valueType = (value != null) ? value.getType() : PropertyType.UNDEFINED; int reqType = getRequiredType(valueType); setValue(value, reqType); } /** * @see Property#setValue(javax.jcr.Value[]) */ public void setValue(Value[] values) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(true); // assert equal types for all values entries int valueType = PropertyType.UNDEFINED; if (values != null) { for (int i = 0; i < values.length; i++) { if (values[i] == null) { // skip null values as those will be purged later continue; } if (valueType == PropertyType.UNDEFINED) { valueType = values[i].getType(); } else if (valueType != values[i].getType()) { String msg = "Inhomogeneous type of values (" + safeGetJCRPath() + ")"; log.debug(msg); throw new ValueFormatException(msg); } } } int targetType = definition.getRequiredType(); if (targetType == PropertyType.UNDEFINED) { targetType = (valueType == PropertyType.UNDEFINED) ? PropertyType.STRING : valueType; } // convert to internal values of correct type QValue[] qValues = null; if (values != null) { Value[] vs = ValueHelper.convert(values, targetType, session.getValueFactory()); qValues = ValueFormat.getQValues(vs, session.getNamespaceResolver()); } setInternalValues(qValues, targetType); } /** * @see Property#setValue(String) */ public void setValue(String value) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.STRING); if (value == null) { setInternalValues(null, reqType); } else { setValue(session.getValueFactory().createValue(value), reqType); } } /** * @see Property#setValue(String[]) */ public void setValue(String[] values) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(true); int reqType = getRequiredType(PropertyType.STRING); QValue[] qValues = null; // convert to internal values of correct type if (values != null) { qValues = new QValue[values.length]; for (int i = 0; i < values.length; i++) { String string = values[i]; QValue qValue = null; if (string != null) { if (reqType != PropertyType.STRING) { // type conversion required Value v = ValueHelper.convert(string, reqType, session.getValueFactory()); qValue = ValueFormat.getQValue(v, session.getNamespaceResolver()); } else { // no type conversion required qValue = QValue.create(string); } } qValues[i] = qValue; } } setInternalValues(qValues, reqType); } /** * @see Property#setValue(InputStream) */ public void setValue(InputStream value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.BINARY); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(long) */ public void setValue(long value) throws ValueFormatException, VersionException, LockException, RepositoryException { 
checkIsWritable(false); int reqType = getRequiredType(PropertyType.LONG); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(double) */ public void setValue(double value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.DOUBLE); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(Calendar) */ public void setValue(Calendar value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.DATE); if (value == null) { setInternalValues(null, reqType); } else { setValue(session.getValueFactory().createValue(value), reqType); } } /** * @see Property#setValue(boolean) */ public void setValue(boolean value) throws ValueFormatException, VersionException, LockException, ConstraintViolationException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.BOOLEAN); setValue(session.getValueFactory().createValue(value), reqType); } /** * @see Property#setValue(Node) */ public void setValue(Node value) throws ValueFormatException, VersionException, LockException, RepositoryException { checkIsWritable(false); int reqType = getRequiredType(PropertyType.REFERENCE); if (value == null) { setInternalValues(null, reqType); } else { if (reqType == PropertyType.REFERENCE) { if (value instanceof NodeImpl) { NodeImpl targetNode = (NodeImpl)value; if (targetNode.isNodeType(QName.MIX_REFERENCEABLE)) { QValue qValue = QValue.create(targetNode.getUUID(), PropertyType.REFERENCE); setInternalValues(new QValue[]{qValue}, reqType); } else { throw new ValueFormatException("Target node must be of node type mix:referenceable"); } } else { String msg = "Incompatible Node object: " + value + "(" + safeGetJCRPath() + ")"; log.debug(msg); throw new RepositoryException(msg); } } else { throw new ValueFormatException("Property must be of type REFERENCE (" + safeGetJCRPath() + ")"); } } } /** * @see Property#getValue() */ public Value getValue() throws ValueFormatException, RepositoryException { QValue value = getQValue(); return ValueFormat.getJCRValue(value, session.getNamespaceResolver(), session.getValueFactory()); } /** * @see Property#getValues() */ public Value[] getValues() throws ValueFormatException, RepositoryException { QValue[] qValues = getQValues(); Value[] values = new Value[qValues.length]; for (int i = 0; i < qValues.length; i++) { values[i] = ValueFormat.getJCRValue(qValues[i], session.getNamespaceResolver(), session.getValueFactory()); } return values; } /** * @see Property#getString() */ public String getString() throws ValueFormatException, RepositoryException { return getValue().getString(); } /** * @see Property#getStream() */ public InputStream getStream() throws ValueFormatException, RepositoryException { return getValue().getStream(); } /** * @see Property#getLong() */ public long getLong() throws ValueFormatException, RepositoryException { return getValue().getLong(); } /** * @see Property#getDouble() */ public double getDouble() throws ValueFormatException, RepositoryException { return getValue().getDouble(); } /** * @see Property#getDate() */ public Calendar getDate() throws ValueFormatException, RepositoryException { return getValue().getDate(); } /** * @see Property#getBoolean() */ public boolean getBoolean() throws ValueFormatException, RepositoryException { return 
getValue().getBoolean(); } /** * @see Property#getNode() */ public Node getNode() throws ValueFormatException, RepositoryException { QValue value = getQValue(); if (value.getType() == PropertyType.REFERENCE) { return session.getNodeByUUID(value.getString()); } else { throw new ValueFormatException("Property must be of type REFERENCE (" + safeGetJCRPath() + ")"); } } /** * @see Property#getLength */ public long getLength() throws ValueFormatException, RepositoryException { return getLength(getQValue()); } /** * @see Property#getLengths */ public long[] getLengths() throws ValueFormatException, RepositoryException { QValue[] values = getQValues(); long[] lengths = new long[values.length]; for (int i = 0; i < values.length; i++) { lengths[i] = getLength(values[i]); } return lengths; } /** * * @param value * @return * @throws RepositoryException */ private long getLength(QValue value) throws RepositoryException { long length = UNDEFINED_PROPERTY_LENGTH; switch (value.getType()) { case PropertyType.STRING: case PropertyType.BINARY: case PropertyType.LONG: case PropertyType.DOUBLE: length = value.getLength(); break; case PropertyType.NAME: Value jcrValue = ValueFormat.getJCRValue(value, session.getNamespaceResolver(), session.getValueFactory()); length = jcrValue.getString().length(); break; } return length; } /** * @see javax.jcr.Property#getDefinition() */ public PropertyDefinition getDefinition() throws RepositoryException { checkStatus(); return definition; } /** * @see javax.jcr.Property#getType() */ public int getType() throws RepositoryException { checkStatus(); return getPropertyState().getType(); } //-----------------------------------------------------------< ItemImpl >--- /** * Returns the QName defined with this <code>PropertyState</code> * * @return * @see PropertyState#getQName() * @see ItemImpl#getQName() */ QName getQName() { return getPropertyState().getQName(); } //------------------------------------------------------< check methods >--- /** * * @param multiValues * @throws RepositoryException */ private void checkIsWritable(boolean multiValues) throws RepositoryException { // check common to properties and nodes checkIsWritable(); // property specific check if (definition.isMultiple() != multiValues) { throw new ValueFormatException(getPath() + "Multivalue definition of " + safeGetJCRPath() + " does not match to given value(s)."); } } //---------------------------------------------< private implementation >--- /** * * @return true if the definition indicates that this Property is multivalued. */ private boolean isMultiple() { return definition.isMultiple(); } /** * * @param defaultType * @return the required type for this property. 
*/ private int getRequiredType(int defaultType) { // check type according to definition of this property int reqType = definition.getRequiredType(); if (reqType == PropertyType.UNDEFINED) { if (defaultType == PropertyType.UNDEFINED) { reqType = PropertyType.STRING; } else { reqType = defaultType; } } return reqType; } /** * * @return * @throws ValueFormatException * @throws RepositoryException */ private QValue getQValue() throws ValueFormatException, RepositoryException { checkStatus(); if (isMultiple()) { throw new ValueFormatException(safeGetJCRPath() + " is multi-valued and can therefore only be retrieved as an array of values"); } // avoid unnecessary object creation if possible return getPropertyState().getValue(); } /** * * @return * @throws ValueFormatException * @throws RepositoryException */ private QValue[] getQValues() throws ValueFormatException, RepositoryException { checkStatus(); if (!isMultiple()) { throw new ValueFormatException(safeGetJCRPath() + " is not multi-valued and can therefore only be retrieved as single value"); } // avoid unnecessary object creation if possible return getPropertyState().getValues(); } /** * * @param value * @param requiredType * @throws RepositoryException */ private void setValue(Value value, int requiredType) throws RepositoryException { if (requiredType == PropertyType.UNDEFINED) { // should never get here since calling methods assert valid type throw new IllegalArgumentException("Property type of a value cannot be undefined (" + safeGetJCRPath() + ")."); } if (value == null) { setInternalValues(null, requiredType); return; } QValue qValue; if (requiredType != value.getType()) { // type conversion required Value v = ValueHelper.convert(value, requiredType, session.getValueFactory()); qValue = ValueFormat.getQValue(v, session.getNamespaceResolver()); } else { // no type conversion required qValue = ValueFormat.getQValue(value, session.getNamespaceResolver()); } setInternalValues(new QValue[]{qValue}, requiredType); } /** * * @param qValues * @param valueType * @throws ConstraintViolationException * @throws RepositoryException */ private void setInternalValues(QValue[] qValues, int valueType) throws ConstraintViolationException, RepositoryException { // check for null value if (qValues == null) { // setting a property to null removes it automatically remove(); return; } // modify the state of this property Operation op = SetPropertyValue.create(getPropertyState(), qValues, valueType); session.getSessionItemStateManager().execute(op); } /** * Private helper to access the <code>PropertyState</code> directly * * @return state for this Property */ private PropertyState getPropertyState() { return (PropertyState) getItemState(); } }
work in progress

setValue(InputStream): missing handling for null values

git-svn-id: 02b679d096242155780e1604e997947d154ee04a@464422 13f79535-47bb-0310-9956-ffa450edef68
contrib/spi/jcr2spi/src/main/java/org/apache/jackrabbit/jcr2spi/PropertyImpl.java
work in progress
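The commit message above flags that setValue(InputStream) is still missing the null handling that its sibling setters already have. For illustration only, here is a minimal sketch of how the setter could mirror the null checks in setValue(String) and setValue(Calendar) from the listing above; every member it calls (checkIsWritable, getRequiredType, setInternalValues, session) is taken from that listing, and this is not the project's actual follow-up patch:

/**
 * @see Property#setValue(InputStream)
 */
public void setValue(InputStream value) throws ValueFormatException, VersionException,
        LockException, RepositoryException {
    checkIsWritable(false);
    int reqType = getRequiredType(PropertyType.BINARY);
    if (value == null) {
        // Setting a property to null removes it, matching the String and Calendar setters.
        setInternalValues(null, reqType);
    } else {
        setValue(session.getValueFactory().createValue(value), reqType);
    }
}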
Java
apache-2.0
96de2d7e67e825a66d6264bf5816356ad238ba68
0
Ardulink/Ardulink-2,Ardulink/Ardulink-2,Ardulink/Ardulink-2
package org.ardulink.util.anno; import static java.lang.annotation.RetentionPolicy.SOURCE; import java.lang.annotation.Retention; /** * Elements marked with this annotation can be replaced by more efficient or * easier to read alternatives. For example some for-loops can be replaced by * Stream when the enclosing Ardulink module or one of it dependencies is * updated. To make search as easy as possible when upgrading, value * <b>should</b> be one of the constant values. */ @Retention(SOURCE) public @interface LapsedWith { String JDK8 = "JDK8"; String JDK9 = "JDK9"; String value(); String module() default ""; }
ardulink-core-util/src/main/java/org/ardulink/util/anno/LapsedWith.java
package org.ardulink.util.anno; /** * Elements marked with this annotation can be replaced by more efficient or * easier to read alternatives. For example some for-loops can be replaced by * Stream when the enclosing Ardulink module or one of it dependencies is * updated. To make search as easy as possible when upgrading, value * <b>should</b> be one of the constant values. */ public @interface LapsedWith { String JDK8 = "JDK8"; String JDK9 = "JDK9"; String value(); String module() default ""; }
using source RetentionPolicy
ardulink-core-util/src/main/java/org/ardulink/util/anno/LapsedWith.java
using source RetentionPolicy
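Since the annotation above now carries RetentionPolicy.SOURCE, it is kept in source code for tooling and text search but discarded by the compiler, so it leaves no trace in class files or at runtime. A hypothetical usage sketch (the Example class and upperCase method are invented for illustration) showing how code is marked for later simplification:

import java.util.ArrayList;
import java.util.List;

import org.ardulink.util.anno.LapsedWith;

public class Example {

    // A search for LapsedWith.JDK8 finds this loop once the module moves to
    // Java 8; the loop could then be rewritten as a Stream pipeline.
    @LapsedWith(LapsedWith.JDK8)
    private static List<String> upperCase(List<String> in) {
        List<String> out = new ArrayList<String>();
        for (String s : in) {
            out.add(s.toUpperCase());
        }
        return out;
    }
}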
Java
apache-2.0
d5cefcd21349060fb1d2cf7bc363c6a0e8739183
0
GerritCodeReview/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.notedb; import static com.google.common.base.Preconditions.checkArgument; import static com.google.gerrit.server.CommentsUtil.COMMENT_ORDER; import static com.google.gerrit.server.notedb.ChangeNotes.parseException; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.CharMatcher; import com.google.common.collect.ImmutableList; import com.google.common.collect.ListMultimap; import com.google.common.primitives.Ints; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.Comment; import com.google.gerrit.reviewdb.client.CommentRange; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.RevId; import com.google.gerrit.server.GerritPersonIdent; import com.google.gerrit.server.account.AccountCache; import com.google.gerrit.server.config.AnonymousCowardName; import com.google.gerrit.server.config.GerritServerConfig; import com.google.gerrit.server.config.GerritServerId; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.inject.Inject; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.sql.Timestamp; import java.text.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; import org.eclipse.jgit.errors.ConfigInvalidException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.revwalk.FooterKey; import org.eclipse.jgit.util.GitDateFormatter; import org.eclipse.jgit.util.GitDateFormatter.Format; import org.eclipse.jgit.util.GitDateParser; import org.eclipse.jgit.util.MutableInteger; import org.eclipse.jgit.util.QuotedString; import org.eclipse.jgit.util.RawParseUtils; public class ChangeNoteUtil { public static final FooterKey FOOTER_ASSIGNEE = new FooterKey("Assignee"); public static final FooterKey FOOTER_BRANCH = new FooterKey("Branch"); public static final FooterKey FOOTER_CHANGE_ID = new FooterKey("Change-id"); public static final FooterKey FOOTER_COMMIT = new FooterKey("Commit"); public static final FooterKey FOOTER_CURRENT = new FooterKey("Current"); public static final FooterKey FOOTER_GROUPS = new FooterKey("Groups"); public static final FooterKey FOOTER_HASHTAGS = new FooterKey("Hashtags"); public static final FooterKey FOOTER_LABEL = new FooterKey("Label"); public static final FooterKey FOOTER_PATCH_SET = new FooterKey("Patch-set"); public static final FooterKey FOOTER_PATCH_SET_DESCRIPTION = new FooterKey("Patch-set-description"); public static final FooterKey FOOTER_PRIVATE = new FooterKey("Private"); public static final FooterKey FOOTER_READ_ONLY_UNTIL = new 
FooterKey("Read-only-until"); public static final FooterKey FOOTER_REAL_USER = new FooterKey("Real-user"); public static final FooterKey FOOTER_STATUS = new FooterKey("Status"); public static final FooterKey FOOTER_SUBJECT = new FooterKey("Subject"); public static final FooterKey FOOTER_SUBMISSION_ID = new FooterKey("Submission-id"); public static final FooterKey FOOTER_SUBMITTED_WITH = new FooterKey("Submitted-with"); public static final FooterKey FOOTER_TOPIC = new FooterKey("Topic"); public static final FooterKey FOOTER_TAG = new FooterKey("Tag"); public static final FooterKey FOOTER_WORK_IN_PROGRESS = new FooterKey("Work-in-progress"); public static final FooterKey FOOTER_REVERT_OF = new FooterKey("Revert-of"); private static final String AUTHOR = "Author"; private static final String BASE_PATCH_SET = "Base-for-patch-set"; private static final String COMMENT_RANGE = "Comment-range"; private static final String FILE = "File"; private static final String LENGTH = "Bytes"; private static final String PARENT = "Parent"; private static final String PARENT_NUMBER = "Parent-number"; private static final String PATCH_SET = "Patch-set"; private static final String REAL_AUTHOR = "Real-author"; private static final String REVISION = "Revision"; private static final String UUID = "UUID"; private static final String UNRESOLVED = "Unresolved"; private static final String TAG = FOOTER_TAG.getName(); public static String formatTime(PersonIdent ident, Timestamp t) { GitDateFormatter dateFormatter = new GitDateFormatter(Format.DEFAULT); // TODO(dborowitz): Use a ThreadLocal or use Joda. PersonIdent newIdent = new PersonIdent(ident, t); return dateFormatter.formatDate(newIdent); } static Gson newGson() { return new GsonBuilder() .registerTypeAdapter(Timestamp.class, new CommentTimestampAdapter().nullSafe()) .setPrettyPrinting() .create(); } private final AccountCache accountCache; private final PersonIdent serverIdent; private final String anonymousCowardName; private final String serverId; private final Gson gson = newGson(); private final boolean writeJson; @Inject public ChangeNoteUtil( AccountCache accountCache, @GerritPersonIdent PersonIdent serverIdent, @AnonymousCowardName String anonymousCowardName, @GerritServerId String serverId, @GerritServerConfig Config config) { this.accountCache = accountCache; this.serverIdent = serverIdent; this.anonymousCowardName = anonymousCowardName; this.serverId = serverId; this.writeJson = config.getBoolean("notedb", "writeJson", true); } @VisibleForTesting public PersonIdent newIdent( Account author, Date when, PersonIdent serverIdent, String anonymousCowardName) { return new PersonIdent( author.getName(anonymousCowardName), author.getId().get() + "@" + serverId, when, serverIdent.getTimeZone()); } public boolean getWriteJson() { return writeJson; } public Gson getGson() { return gson; } public String getServerId() { return serverId; } public Account.Id parseIdent(PersonIdent ident, Change.Id changeId) throws ConfigInvalidException { String email = ident.getEmailAddress(); int at = email.indexOf('@'); if (at >= 0) { String host = email.substring(at + 1, email.length()); if (host.equals(serverId)) { Integer id = Ints.tryParse(email.substring(0, at)); if (id != null) { return new Account.Id(id); } } } throw parseException(changeId, "invalid identity, expected <id>@%s: %s", serverId, email); } private static boolean match(byte[] note, MutableInteger p, byte[] expected) { int m = RawParseUtils.match(note, p.value, expected); return m == p.value + expected.length; } 
public List<Comment> parseNote(byte[] note, MutableInteger p, Change.Id changeId) throws ConfigInvalidException { if (p.value >= note.length) { return ImmutableList.of(); } Set<Comment.Key> seen = new HashSet<>(); List<Comment> result = new ArrayList<>(); int sizeOfNote = note.length; byte[] psb = PATCH_SET.getBytes(UTF_8); byte[] bpsb = BASE_PATCH_SET.getBytes(UTF_8); byte[] bpn = PARENT_NUMBER.getBytes(UTF_8); RevId revId = new RevId(parseStringField(note, p, changeId, REVISION)); String fileName = null; PatchSet.Id psId = null; boolean isForBase = false; Integer parentNumber = null; while (p.value < sizeOfNote) { boolean matchPs = match(note, p, psb); boolean matchBase = match(note, p, bpsb); if (matchPs) { fileName = null; psId = parsePsId(note, p, changeId, PATCH_SET); isForBase = false; } else if (matchBase) { fileName = null; psId = parsePsId(note, p, changeId, BASE_PATCH_SET); isForBase = true; if (match(note, p, bpn)) { parentNumber = parseParentNumber(note, p, changeId); } } else if (psId == null) { throw parseException(changeId, "missing %s or %s header", PATCH_SET, BASE_PATCH_SET); } Comment c = parseComment(note, p, fileName, psId, revId, isForBase, parentNumber); fileName = c.key.filename; if (!seen.add(c.key)) { throw parseException(changeId, "multiple comments for %s in note", c.key); } result.add(c); } return result; } private Comment parseComment( byte[] note, MutableInteger curr, String currentFileName, PatchSet.Id psId, RevId revId, boolean isForBase, Integer parentNumber) throws ConfigInvalidException { Change.Id changeId = psId.getParentKey(); // Check if there is a new file. boolean newFile = (RawParseUtils.match(note, curr.value, FILE.getBytes(UTF_8))) != -1; if (newFile) { // If so, parse the new file name. currentFileName = parseFilename(note, curr, changeId); } else if (currentFileName == null) { throw parseException(changeId, "could not parse %s", FILE); } CommentRange range = parseCommentRange(note, curr); if (range == null) { throw parseException(changeId, "could not parse %s", COMMENT_RANGE); } Timestamp commentTime = parseTimestamp(note, curr, changeId); Account.Id aId = parseAuthor(note, curr, changeId, AUTHOR); boolean hasRealAuthor = (RawParseUtils.match(note, curr.value, REAL_AUTHOR.getBytes(UTF_8))) != -1; Account.Id raId = null; if (hasRealAuthor) { raId = parseAuthor(note, curr, changeId, REAL_AUTHOR); } boolean hasParent = (RawParseUtils.match(note, curr.value, PARENT.getBytes(UTF_8))) != -1; String parentUUID = null; boolean unresolved = false; if (hasParent) { parentUUID = parseStringField(note, curr, changeId, PARENT); } boolean hasUnresolved = (RawParseUtils.match(note, curr.value, UNRESOLVED.getBytes(UTF_8))) != -1; if (hasUnresolved) { unresolved = parseBooleanField(note, curr, changeId, UNRESOLVED); } String uuid = parseStringField(note, curr, changeId, UUID); boolean hasTag = (RawParseUtils.match(note, curr.value, TAG.getBytes(UTF_8))) != -1; String tag = null; if (hasTag) { tag = parseStringField(note, curr, changeId, TAG); } int commentLength = parseCommentLength(note, curr, changeId); String message = RawParseUtils.decode(UTF_8, note, curr.value, curr.value + commentLength); checkResult(message, "message contents", changeId); Comment c = new Comment( new Comment.Key(uuid, currentFileName, psId.get()), aId, commentTime, isForBase ? (short) (parentNumber == null ? 
0 : -parentNumber) : (short) 1, message, serverId, unresolved); c.lineNbr = range.getEndLine(); c.parentUuid = parentUUID; c.tag = tag; c.setRevId(revId); if (raId != null) { c.setRealAuthor(raId); } if (range.getStartCharacter() != -1) { c.setRange(range); } curr.value = RawParseUtils.nextLF(note, curr.value + commentLength); curr.value = RawParseUtils.nextLF(note, curr.value); return c; } private static String parseStringField( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { int endOfLine = RawParseUtils.nextLF(note, curr.value); checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfField = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; curr.value = endOfLine; return RawParseUtils.decode(UTF_8, note, startOfField, endOfLine - 1); } /** * @return a comment range. If the comment range line in the note only has one number, we return a * CommentRange with that one number as the end line and the other fields as -1. If the * comment range line in the note contains a whole comment range, then we return a * CommentRange with all fields set. If the line is not correctly formatted, return null. */ private static CommentRange parseCommentRange(byte[] note, MutableInteger ptr) { CommentRange range = new CommentRange(-1, -1, -1, -1); int last = ptr.value; int startLine = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '\n') { range.setEndLine(startLine); ptr.value += 1; return range; } else if (note[ptr.value] == ':') { range.setStartLine(startLine); ptr.value += 1; } else { return null; } last = ptr.value; int startChar = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '-') { range.setStartCharacter(startChar); ptr.value += 1; } else { return null; } last = ptr.value; int endLine = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == ':') { range.setEndLine(endLine); ptr.value += 1; } else { return null; } last = ptr.value; int endChar = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '\n') { range.setEndCharacter(endChar); ptr.value += 1; } else { return null; } return range; } private static PatchSet.Id parsePsId( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfPsId = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); int patchSetId = RawParseUtils.parseBase10(note, startOfPsId, i); int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", fieldName); } checkResult(patchSetId, "patchset id", changeId); curr.value = endOfLine; return new PatchSet.Id(changeId, patchSetId); } private static Integer parseParentNumber(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, PARENT_NUMBER, changeId); int start = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); int parentNumber = RawParseUtils.parseBase10(note, start, i); int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", PARENT_NUMBER); } checkResult(parentNumber, 
"parent number", changeId); curr.value = endOfLine; return Integer.valueOf(parentNumber); } private static String parseFilename(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, FILE, changeId); int startOfFileName = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; int endOfLine = RawParseUtils.nextLF(note, curr.value); curr.value = endOfLine; curr.value = RawParseUtils.nextLF(note, curr.value); return QuotedString.GIT_PATH.dequote( RawParseUtils.decode(UTF_8, note, startOfFileName, endOfLine - 1)); } private static Timestamp parseTimestamp(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { int endOfLine = RawParseUtils.nextLF(note, curr.value); Timestamp commentTime; String dateString = RawParseUtils.decode(UTF_8, note, curr.value, endOfLine - 1); try { commentTime = new Timestamp(GitDateParser.parse(dateString, null, Locale.US).getTime()); } catch (ParseException e) { throw new ConfigInvalidException("could not parse comment timestamp", e); } curr.value = endOfLine; return checkResult(commentTime, "comment timestamp", changeId); } private Account.Id parseAuthor( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfAccountId = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; PersonIdent ident = RawParseUtils.parsePersonIdent(note, startOfAccountId); Account.Id aId = parseIdent(ident, changeId); curr.value = RawParseUtils.nextLF(note, curr.value); return checkResult(aId, fieldName, changeId); } private static int parseCommentLength(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, LENGTH, changeId); int startOfLength = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); i.value = startOfLength; int commentLength = RawParseUtils.parseBase10(note, startOfLength, i); if (i.value == startOfLength) { throw parseException(changeId, "could not parse %s", LENGTH); } int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", LENGTH); } curr.value = endOfLine; return checkResult(commentLength, "comment length", changeId); } private boolean parseBooleanField( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { String str = parseStringField(note, curr, changeId, fieldName); if ("true".equalsIgnoreCase(str)) { return true; } else if ("false".equalsIgnoreCase(str)) { return false; } throw parseException(changeId, "invalid boolean for %s: %s", fieldName, str); } private static <T> T checkResult(T o, String fieldName, Change.Id changeId) throws ConfigInvalidException { if (o == null) { throw parseException(changeId, "could not parse %s", fieldName); } return o; } private static int checkResult(int i, String fieldName, Change.Id changeId) throws ConfigInvalidException { if (i <= 0) { throw parseException(changeId, "could not parse %s", fieldName); } return i; } private void appendHeaderField(PrintWriter writer, String field, String value) { writer.print(field); writer.print(": "); writer.print(value); writer.print('\n'); } private static void checkHeaderLineFormat( byte[] note, MutableInteger curr, String fieldName, Change.Id changeId) throws ConfigInvalidException { boolean correct = RawParseUtils.match(note, curr.value, 
fieldName.getBytes(UTF_8)) != -1; int p = curr.value + fieldName.length(); correct &= (p < note.length && note[p] == ':'); p++; correct &= (p < note.length && note[p] == ' '); if (!correct) { throw parseException(changeId, "could not parse %s", fieldName); } } /** * Build a note that contains the metadata for and the contents of all of the comments in the * given comments. * * @param comments Comments to be written to the output stream, keyed by patch set ID; multiple * patch sets are allowed since base revisions may be shared across patch sets. All of the * comments must share the same RevId, and all the comments for a given patch set must have * the same side. * @param out output stream to write to. */ void buildNote(ListMultimap<Integer, Comment> comments, OutputStream out) { if (comments.isEmpty()) { return; } List<Integer> psIds = new ArrayList<>(comments.keySet()); Collections.sort(psIds); OutputStreamWriter streamWriter = new OutputStreamWriter(out, UTF_8); try (PrintWriter writer = new PrintWriter(streamWriter)) { String revId = comments.values().iterator().next().revId; appendHeaderField(writer, REVISION, revId); for (int psId : psIds) { List<Comment> psComments = COMMENT_ORDER.sortedCopy(comments.get(psId)); Comment first = psComments.get(0); short side = first.side; appendHeaderField(writer, side <= 0 ? BASE_PATCH_SET : PATCH_SET, Integer.toString(psId)); if (side < 0) { appendHeaderField(writer, PARENT_NUMBER, Integer.toString(-side)); } String currentFilename = null; for (Comment c : psComments) { checkArgument( revId.equals(c.revId), "All comments being added must have all the same RevId. The " + "comment below does not have the same RevId as the others " + "(%s).\n%s", revId, c); checkArgument( side == c.side, "All comments being added must all have the same side. The " + "comment below does not have the same side as the others " + "(%s).\n%s", side, c); String commentFilename = QuotedString.GIT_PATH.quote(c.key.filename); if (!commentFilename.equals(currentFilename)) { currentFilename = commentFilename; writer.print("File: "); writer.print(commentFilename); writer.print("\n\n"); } appendOneComment(writer, c); } } } } private void appendOneComment(PrintWriter writer, Comment c) { // The CommentRange field for a comment is allowed to be null. If it is // null, then in the first line, we simply use the line number field for a // comment instead. If it isn't null, we write the comment range itself. 
Comment.Range range = c.range; if (range != null) { writer.print(range.startLine); writer.print(':'); writer.print(range.startChar); writer.print('-'); writer.print(range.endLine); writer.print(':'); writer.print(range.endChar); } else { writer.print(c.lineNbr); } writer.print("\n"); writer.print(formatTime(serverIdent, c.writtenOn)); writer.print("\n"); appendIdent(writer, AUTHOR, c.author.getId(), c.writtenOn); if (!c.getRealAuthor().equals(c.author)) { appendIdent(writer, REAL_AUTHOR, c.getRealAuthor().getId(), c.writtenOn); } String parent = c.parentUuid; if (parent != null) { appendHeaderField(writer, PARENT, parent); } appendHeaderField(writer, UNRESOLVED, Boolean.toString(c.unresolved)); appendHeaderField(writer, UUID, c.key.uuid); if (c.tag != null) { appendHeaderField(writer, TAG, c.tag); } byte[] messageBytes = c.message.getBytes(UTF_8); appendHeaderField(writer, LENGTH, Integer.toString(messageBytes.length)); writer.print(c.message); writer.print("\n\n"); } private void appendIdent(PrintWriter writer, String header, Account.Id id, Timestamp ts) { PersonIdent ident = newIdent(accountCache.get(id).getAccount(), ts, serverIdent, anonymousCowardName); StringBuilder name = new StringBuilder(); PersonIdent.appendSanitized(name, ident.getName()); name.append(" <"); PersonIdent.appendSanitized(name, ident.getEmailAddress()); name.append('>'); appendHeaderField(writer, header, name.toString()); } private static final CharMatcher INVALID_FOOTER_CHARS = CharMatcher.anyOf("\r\n\0"); static String sanitizeFooter(String value) { // Remove characters that would confuse JGit's footer parser if they were // included in footer values, for example by splitting the footer block into // multiple paragraphs. // // One painful example: RevCommit#getShorMessage() might return a message // containing "\r\r", which RevCommit#getFooterLines() will treat as an // empty paragraph for the purposes of footer parsing. return INVALID_FOOTER_CHARS.trimAndCollapseFrom(value, ' '); } }
gerrit-server/src/main/java/com/google/gerrit/server/notedb/ChangeNoteUtil.java
// Copyright (C) 2013 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.notedb; import static com.google.common.base.Preconditions.checkArgument; import static com.google.gerrit.server.CommentsUtil.COMMENT_ORDER; import static com.google.gerrit.server.notedb.ChangeNotes.parseException; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.CharMatcher; import com.google.common.collect.ImmutableList; import com.google.common.collect.ListMultimap; import com.google.common.primitives.Ints; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.Comment; import com.google.gerrit.reviewdb.client.CommentRange; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.RevId; import com.google.gerrit.server.GerritPersonIdent; import com.google.gerrit.server.account.AccountCache; import com.google.gerrit.server.config.AnonymousCowardName; import com.google.gerrit.server.config.GerritServerConfig; import com.google.gerrit.server.config.GerritServerId; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.inject.Inject; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.sql.Timestamp; import java.text.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; import org.eclipse.jgit.errors.ConfigInvalidException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.revwalk.FooterKey; import org.eclipse.jgit.util.GitDateFormatter; import org.eclipse.jgit.util.GitDateFormatter.Format; import org.eclipse.jgit.util.GitDateParser; import org.eclipse.jgit.util.MutableInteger; import org.eclipse.jgit.util.QuotedString; import org.eclipse.jgit.util.RawParseUtils; public class ChangeNoteUtil { public static final FooterKey FOOTER_ASSIGNEE = new FooterKey("Assignee"); public static final FooterKey FOOTER_BRANCH = new FooterKey("Branch"); public static final FooterKey FOOTER_CHANGE_ID = new FooterKey("Change-id"); public static final FooterKey FOOTER_COMMIT = new FooterKey("Commit"); public static final FooterKey FOOTER_CURRENT = new FooterKey("Current"); public static final FooterKey FOOTER_GROUPS = new FooterKey("Groups"); public static final FooterKey FOOTER_HASHTAGS = new FooterKey("Hashtags"); public static final FooterKey FOOTER_LABEL = new FooterKey("Label"); public static final FooterKey FOOTER_PATCH_SET = new FooterKey("Patch-set"); public static final FooterKey FOOTER_PATCH_SET_DESCRIPTION = new FooterKey("Patch-set-description"); public static final FooterKey FOOTER_PRIVATE = new FooterKey("Private"); public static final FooterKey FOOTER_READ_ONLY_UNTIL = new 
FooterKey("Read-only-until"); public static final FooterKey FOOTER_REAL_USER = new FooterKey("Real-user"); public static final FooterKey FOOTER_STATUS = new FooterKey("Status"); public static final FooterKey FOOTER_SUBJECT = new FooterKey("Subject"); public static final FooterKey FOOTER_SUBMISSION_ID = new FooterKey("Submission-id"); public static final FooterKey FOOTER_SUBMITTED_WITH = new FooterKey("Submitted-with"); public static final FooterKey FOOTER_TOPIC = new FooterKey("Topic"); public static final FooterKey FOOTER_TAG = new FooterKey("Tag"); public static final FooterKey FOOTER_WORK_IN_PROGRESS = new FooterKey("Work-in-progress"); public static final FooterKey FOOTER_REVERT_OF = new FooterKey("Revert-of"); private static final String AUTHOR = "Author"; private static final String BASE_PATCH_SET = "Base-for-patch-set"; private static final String COMMENT_RANGE = "Comment-range"; private static final String FILE = "File"; private static final String LENGTH = "Bytes"; private static final String PARENT = "Parent"; private static final String PARENT_NUMBER = "Parent-number"; private static final String PATCH_SET = "Patch-set"; private static final String REAL_AUTHOR = "Real-author"; private static final String REVISION = "Revision"; private static final String UUID = "UUID"; private static final String UNRESOLVED = "Unresolved"; private static final String TAG = FOOTER_TAG.getName(); public static String formatTime(PersonIdent ident, Timestamp t) { GitDateFormatter dateFormatter = new GitDateFormatter(Format.DEFAULT); // TODO(dborowitz): Use a ThreadLocal or use Joda. PersonIdent newIdent = new PersonIdent(ident, t); return dateFormatter.formatDate(newIdent); } static Gson newGson() { return new GsonBuilder() .registerTypeAdapter(Timestamp.class, new CommentTimestampAdapter().nullSafe()) .setPrettyPrinting() .create(); } private final AccountCache accountCache; private final PersonIdent serverIdent; private final String anonymousCowardName; private final String serverId; private final Gson gson = newGson(); private final boolean writeJson; @Inject public ChangeNoteUtil( AccountCache accountCache, @GerritPersonIdent PersonIdent serverIdent, @AnonymousCowardName String anonymousCowardName, @GerritServerId String serverId, @GerritServerConfig Config config) { this.accountCache = accountCache; this.serverIdent = serverIdent; this.anonymousCowardName = anonymousCowardName; this.serverId = serverId; this.writeJson = config.getBoolean("notedb", "writeJson", false); } @VisibleForTesting public PersonIdent newIdent( Account author, Date when, PersonIdent serverIdent, String anonymousCowardName) { return new PersonIdent( author.getName(anonymousCowardName), author.getId().get() + "@" + serverId, when, serverIdent.getTimeZone()); } public boolean getWriteJson() { return writeJson; } public Gson getGson() { return gson; } public String getServerId() { return serverId; } public Account.Id parseIdent(PersonIdent ident, Change.Id changeId) throws ConfigInvalidException { String email = ident.getEmailAddress(); int at = email.indexOf('@'); if (at >= 0) { String host = email.substring(at + 1, email.length()); if (host.equals(serverId)) { Integer id = Ints.tryParse(email.substring(0, at)); if (id != null) { return new Account.Id(id); } } } throw parseException(changeId, "invalid identity, expected <id>@%s: %s", serverId, email); } private static boolean match(byte[] note, MutableInteger p, byte[] expected) { int m = RawParseUtils.match(note, p.value, expected); return m == p.value + expected.length; } 
public List<Comment> parseNote(byte[] note, MutableInteger p, Change.Id changeId) throws ConfigInvalidException { if (p.value >= note.length) { return ImmutableList.of(); } Set<Comment.Key> seen = new HashSet<>(); List<Comment> result = new ArrayList<>(); int sizeOfNote = note.length; byte[] psb = PATCH_SET.getBytes(UTF_8); byte[] bpsb = BASE_PATCH_SET.getBytes(UTF_8); byte[] bpn = PARENT_NUMBER.getBytes(UTF_8); RevId revId = new RevId(parseStringField(note, p, changeId, REVISION)); String fileName = null; PatchSet.Id psId = null; boolean isForBase = false; Integer parentNumber = null; while (p.value < sizeOfNote) { boolean matchPs = match(note, p, psb); boolean matchBase = match(note, p, bpsb); if (matchPs) { fileName = null; psId = parsePsId(note, p, changeId, PATCH_SET); isForBase = false; } else if (matchBase) { fileName = null; psId = parsePsId(note, p, changeId, BASE_PATCH_SET); isForBase = true; if (match(note, p, bpn)) { parentNumber = parseParentNumber(note, p, changeId); } } else if (psId == null) { throw parseException(changeId, "missing %s or %s header", PATCH_SET, BASE_PATCH_SET); } Comment c = parseComment(note, p, fileName, psId, revId, isForBase, parentNumber); fileName = c.key.filename; if (!seen.add(c.key)) { throw parseException(changeId, "multiple comments for %s in note", c.key); } result.add(c); } return result; } private Comment parseComment( byte[] note, MutableInteger curr, String currentFileName, PatchSet.Id psId, RevId revId, boolean isForBase, Integer parentNumber) throws ConfigInvalidException { Change.Id changeId = psId.getParentKey(); // Check if there is a new file. boolean newFile = (RawParseUtils.match(note, curr.value, FILE.getBytes(UTF_8))) != -1; if (newFile) { // If so, parse the new file name. currentFileName = parseFilename(note, curr, changeId); } else if (currentFileName == null) { throw parseException(changeId, "could not parse %s", FILE); } CommentRange range = parseCommentRange(note, curr); if (range == null) { throw parseException(changeId, "could not parse %s", COMMENT_RANGE); } Timestamp commentTime = parseTimestamp(note, curr, changeId); Account.Id aId = parseAuthor(note, curr, changeId, AUTHOR); boolean hasRealAuthor = (RawParseUtils.match(note, curr.value, REAL_AUTHOR.getBytes(UTF_8))) != -1; Account.Id raId = null; if (hasRealAuthor) { raId = parseAuthor(note, curr, changeId, REAL_AUTHOR); } boolean hasParent = (RawParseUtils.match(note, curr.value, PARENT.getBytes(UTF_8))) != -1; String parentUUID = null; boolean unresolved = false; if (hasParent) { parentUUID = parseStringField(note, curr, changeId, PARENT); } boolean hasUnresolved = (RawParseUtils.match(note, curr.value, UNRESOLVED.getBytes(UTF_8))) != -1; if (hasUnresolved) { unresolved = parseBooleanField(note, curr, changeId, UNRESOLVED); } String uuid = parseStringField(note, curr, changeId, UUID); boolean hasTag = (RawParseUtils.match(note, curr.value, TAG.getBytes(UTF_8))) != -1; String tag = null; if (hasTag) { tag = parseStringField(note, curr, changeId, TAG); } int commentLength = parseCommentLength(note, curr, changeId); String message = RawParseUtils.decode(UTF_8, note, curr.value, curr.value + commentLength); checkResult(message, "message contents", changeId); Comment c = new Comment( new Comment.Key(uuid, currentFileName, psId.get()), aId, commentTime, isForBase ? (short) (parentNumber == null ? 
0 : -parentNumber) : (short) 1, message, serverId, unresolved); c.lineNbr = range.getEndLine(); c.parentUuid = parentUUID; c.tag = tag; c.setRevId(revId); if (raId != null) { c.setRealAuthor(raId); } if (range.getStartCharacter() != -1) { c.setRange(range); } curr.value = RawParseUtils.nextLF(note, curr.value + commentLength); curr.value = RawParseUtils.nextLF(note, curr.value); return c; } private static String parseStringField( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { int endOfLine = RawParseUtils.nextLF(note, curr.value); checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfField = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; curr.value = endOfLine; return RawParseUtils.decode(UTF_8, note, startOfField, endOfLine - 1); } /** * @return a comment range. If the comment range line in the note only has one number, we return a * CommentRange with that one number as the end line and the other fields as -1. If the * comment range line in the note contains a whole comment range, then we return a * CommentRange with all fields set. If the line is not correctly formatted, return null. */ private static CommentRange parseCommentRange(byte[] note, MutableInteger ptr) { CommentRange range = new CommentRange(-1, -1, -1, -1); int last = ptr.value; int startLine = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '\n') { range.setEndLine(startLine); ptr.value += 1; return range; } else if (note[ptr.value] == ':') { range.setStartLine(startLine); ptr.value += 1; } else { return null; } last = ptr.value; int startChar = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '-') { range.setStartCharacter(startChar); ptr.value += 1; } else { return null; } last = ptr.value; int endLine = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == ':') { range.setEndLine(endLine); ptr.value += 1; } else { return null; } last = ptr.value; int endChar = RawParseUtils.parseBase10(note, ptr.value, ptr); if (ptr.value == last) { return null; } else if (note[ptr.value] == '\n') { range.setEndCharacter(endChar); ptr.value += 1; } else { return null; } return range; } private static PatchSet.Id parsePsId( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfPsId = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); int patchSetId = RawParseUtils.parseBase10(note, startOfPsId, i); int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", fieldName); } checkResult(patchSetId, "patchset id", changeId); curr.value = endOfLine; return new PatchSet.Id(changeId, patchSetId); } private static Integer parseParentNumber(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, PARENT_NUMBER, changeId); int start = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); int parentNumber = RawParseUtils.parseBase10(note, start, i); int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", PARENT_NUMBER); } checkResult(parentNumber, 
"parent number", changeId); curr.value = endOfLine; return Integer.valueOf(parentNumber); } private static String parseFilename(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, FILE, changeId); int startOfFileName = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; int endOfLine = RawParseUtils.nextLF(note, curr.value); curr.value = endOfLine; curr.value = RawParseUtils.nextLF(note, curr.value); return QuotedString.GIT_PATH.dequote( RawParseUtils.decode(UTF_8, note, startOfFileName, endOfLine - 1)); } private static Timestamp parseTimestamp(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { int endOfLine = RawParseUtils.nextLF(note, curr.value); Timestamp commentTime; String dateString = RawParseUtils.decode(UTF_8, note, curr.value, endOfLine - 1); try { commentTime = new Timestamp(GitDateParser.parse(dateString, null, Locale.US).getTime()); } catch (ParseException e) { throw new ConfigInvalidException("could not parse comment timestamp", e); } curr.value = endOfLine; return checkResult(commentTime, "comment timestamp", changeId); } private Account.Id parseAuthor( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, fieldName, changeId); int startOfAccountId = RawParseUtils.endOfFooterLineKey(note, curr.value) + 2; PersonIdent ident = RawParseUtils.parsePersonIdent(note, startOfAccountId); Account.Id aId = parseIdent(ident, changeId); curr.value = RawParseUtils.nextLF(note, curr.value); return checkResult(aId, fieldName, changeId); } private static int parseCommentLength(byte[] note, MutableInteger curr, Change.Id changeId) throws ConfigInvalidException { checkHeaderLineFormat(note, curr, LENGTH, changeId); int startOfLength = RawParseUtils.endOfFooterLineKey(note, curr.value) + 1; MutableInteger i = new MutableInteger(); i.value = startOfLength; int commentLength = RawParseUtils.parseBase10(note, startOfLength, i); if (i.value == startOfLength) { throw parseException(changeId, "could not parse %s", LENGTH); } int endOfLine = RawParseUtils.nextLF(note, curr.value); if (i.value != endOfLine - 1) { throw parseException(changeId, "could not parse %s", LENGTH); } curr.value = endOfLine; return checkResult(commentLength, "comment length", changeId); } private boolean parseBooleanField( byte[] note, MutableInteger curr, Change.Id changeId, String fieldName) throws ConfigInvalidException { String str = parseStringField(note, curr, changeId, fieldName); if ("true".equalsIgnoreCase(str)) { return true; } else if ("false".equalsIgnoreCase(str)) { return false; } throw parseException(changeId, "invalid boolean for %s: %s", fieldName, str); } private static <T> T checkResult(T o, String fieldName, Change.Id changeId) throws ConfigInvalidException { if (o == null) { throw parseException(changeId, "could not parse %s", fieldName); } return o; } private static int checkResult(int i, String fieldName, Change.Id changeId) throws ConfigInvalidException { if (i <= 0) { throw parseException(changeId, "could not parse %s", fieldName); } return i; } private void appendHeaderField(PrintWriter writer, String field, String value) { writer.print(field); writer.print(": "); writer.print(value); writer.print('\n'); } private static void checkHeaderLineFormat( byte[] note, MutableInteger curr, String fieldName, Change.Id changeId) throws ConfigInvalidException { boolean correct = RawParseUtils.match(note, curr.value, 
fieldName.getBytes(UTF_8)) != -1; int p = curr.value + fieldName.length(); correct &= (p < note.length && note[p] == ':'); p++; correct &= (p < note.length && note[p] == ' '); if (!correct) { throw parseException(changeId, "could not parse %s", fieldName); } } /** * Build a note that contains the metadata for and the contents of all of the comments in the * given comments. * * @param comments Comments to be written to the output stream, keyed by patch set ID; multiple * patch sets are allowed since base revisions may be shared across patch sets. All of the * comments must share the same RevId, and all the comments for a given patch set must have * the same side. * @param out output stream to write to. */ void buildNote(ListMultimap<Integer, Comment> comments, OutputStream out) { if (comments.isEmpty()) { return; } List<Integer> psIds = new ArrayList<>(comments.keySet()); Collections.sort(psIds); OutputStreamWriter streamWriter = new OutputStreamWriter(out, UTF_8); try (PrintWriter writer = new PrintWriter(streamWriter)) { String revId = comments.values().iterator().next().revId; appendHeaderField(writer, REVISION, revId); for (int psId : psIds) { List<Comment> psComments = COMMENT_ORDER.sortedCopy(comments.get(psId)); Comment first = psComments.get(0); short side = first.side; appendHeaderField(writer, side <= 0 ? BASE_PATCH_SET : PATCH_SET, Integer.toString(psId)); if (side < 0) { appendHeaderField(writer, PARENT_NUMBER, Integer.toString(-side)); } String currentFilename = null; for (Comment c : psComments) { checkArgument( revId.equals(c.revId), "All comments being added must have all the same RevId. The " + "comment below does not have the same RevId as the others " + "(%s).\n%s", revId, c); checkArgument( side == c.side, "All comments being added must all have the same side. The " + "comment below does not have the same side as the others " + "(%s).\n%s", side, c); String commentFilename = QuotedString.GIT_PATH.quote(c.key.filename); if (!commentFilename.equals(currentFilename)) { currentFilename = commentFilename; writer.print("File: "); writer.print(commentFilename); writer.print("\n\n"); } appendOneComment(writer, c); } } } } private void appendOneComment(PrintWriter writer, Comment c) { // The CommentRange field for a comment is allowed to be null. If it is // null, then in the first line, we simply use the line number field for a // comment instead. If it isn't null, we write the comment range itself. 
Comment.Range range = c.range; if (range != null) { writer.print(range.startLine); writer.print(':'); writer.print(range.startChar); writer.print('-'); writer.print(range.endLine); writer.print(':'); writer.print(range.endChar); } else { writer.print(c.lineNbr); } writer.print("\n"); writer.print(formatTime(serverIdent, c.writtenOn)); writer.print("\n"); appendIdent(writer, AUTHOR, c.author.getId(), c.writtenOn); if (!c.getRealAuthor().equals(c.author)) { appendIdent(writer, REAL_AUTHOR, c.getRealAuthor().getId(), c.writtenOn); } String parent = c.parentUuid; if (parent != null) { appendHeaderField(writer, PARENT, parent); } appendHeaderField(writer, UNRESOLVED, Boolean.toString(c.unresolved)); appendHeaderField(writer, UUID, c.key.uuid); if (c.tag != null) { appendHeaderField(writer, TAG, c.tag); } byte[] messageBytes = c.message.getBytes(UTF_8); appendHeaderField(writer, LENGTH, Integer.toString(messageBytes.length)); writer.print(c.message); writer.print("\n\n"); } private void appendIdent(PrintWriter writer, String header, Account.Id id, Timestamp ts) { PersonIdent ident = newIdent(accountCache.get(id).getAccount(), ts, serverIdent, anonymousCowardName); StringBuilder name = new StringBuilder(); PersonIdent.appendSanitized(name, ident.getName()); name.append(" <"); PersonIdent.appendSanitized(name, ident.getEmailAddress()); name.append('>'); appendHeaderField(writer, header, name.toString()); } private static final CharMatcher INVALID_FOOTER_CHARS = CharMatcher.anyOf("\r\n\0"); static String sanitizeFooter(String value) { // Remove characters that would confuse JGit's footer parser if they were // included in footer values, for example by splitting the footer block into // multiple paragraphs. // // One painful example: RevCommit#getShorMessage() might return a message // containing "\r\r", which RevCommit#getFooterLines() will treat as an // empty paragraph for the purposes of footer parsing. return INVALID_FOOTER_CHARS.trimAndCollapseFrom(value, ' '); } }
Default to writing comments in JSON format

The default in 2.15-rc2 and earlier was inadvertently left to use the legacy comment format. Combined with the fact that NoteDb is the default for changes in 2.15-rc2, this means that there may be sites in the wild that have notes written in the legacy format.

Ultimately, we will want to migrate those sites to use the JSON format, so we can delete the legacy codepaths. However, we can't do schema migrations in a stable branch, so the legacy code will have to live on for another release cycle. The alternative would be to provide a standalone migration tool in the stable branch, but that puts an additional burden on site admins, and doesn't provide any real benefit other than allowing us to delete some code sooner.

All that being said, it still makes sense to change the default in the stable branch, if for no other reason than to minimize the time spent in migrating data on the next upgrade.

Change-Id: I0a1116317fa5740e49548eb2349d14a5bb7e83e1
gerrit-server/src/main/java/com/google/gerrit/server/notedb/ChangeNoteUtil.java
Default to writing comments in JSON format
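The change above only flips the default passed to the jgit Config lookup in ChangeNoteUtil, so a site that sets notedb.writeJson explicitly in gerrit.config is unaffected. A small sketch (WriteJsonDefaultDemo is an invented name, not Gerrit code) of how that lookup resolves, assuming org.eclipse.jgit is on the classpath:

import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Config;

public class WriteJsonDefaultDemo {
    public static void main(String[] args) throws ConfigInvalidException {
        // No [notedb] section at all: the default argument (now true) is returned.
        Config empty = new Config();
        System.out.println(empty.getBoolean("notedb", "writeJson", true)); // true

        // An explicit site setting still wins over the default.
        Config explicit = new Config();
        explicit.fromText("[notedb]\n  writeJson = false\n");
        System.out.println(explicit.getBoolean("notedb", "writeJson", true)); // false
    }
}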
Java
apache-2.0
ebd9ec8acb87093b5021ff736db5662a99a11c44
0
paulswithers/XPagesExtensionLibrary,paulswithers/XPagesExtensionLibrary,paulswithers/XPagesExtensionLibrary
/* * Copyright IBM Corp. 2010 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.ibm.xsp.extlib.relational.util; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import com.ibm.commons.util.StringUtil; import com.ibm.commons.vfs.VFS; import com.ibm.commons.vfs.VFSException; import com.ibm.commons.vfs.VFSFile; import com.ibm.commons.vfs.VFSObjectCache; import com.ibm.designer.runtime.Application; import com.ibm.xsp.FacesExceptionEx; import com.ibm.xsp.extlib.relational.component.jdbc.IJdbcConnectionManager; import com.ibm.xsp.extlib.relational.jdbc.jndi.JndiRegistry; import com.ibm.xsp.util.FacesUtil; public class JdbcUtil { public static String JDBC_ROOT = "/WEB-INF/jdbc"; // $NON-NLS-1$ // ======================================================================== // Access to JDBC Connections // ======================================================================== /** * Create a JDBC connection from a URL. The connection is actually created * and added to a FacesContextListener */ public static Connection createConnectionFromUrl(FacesContext context, String connectionUrl) throws SQLException { if (StringUtil.isNotEmpty(connectionUrl)) { return DriverManager.getConnection(connectionUrl); } return null; } /** * Create a JDBC connection. The connection is actually created and added to * a FacesContextListener */ public static Connection getConnection(FacesContext context, String name) throws SQLException { return createNamedConnection(context, name); } /** * Create a JDBC connection. The connection is actually created and added to * a FacesContextListener */ public static Connection createNamedConnection(FacesContext context, String name) throws SQLException { try { String jndiName = name; if (!jndiName.startsWith("java:")) { // $NON-NLS-1$ jndiName = JndiRegistry.getJNDIBindName(name); } InitialContext ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup(jndiName); if (ds != null) { return ds.getConnection(); } return null; } catch (NamingException ex) { throw (SQLException) new SQLException().initCause(ex); } } /** * Get a connection from a connection manager. 
* * @param context * @param name * @param shared * @return * @throws SQLException */ public static Connection createManagedConnection(FacesContext context, UIComponent from, String name) throws SQLException { if (from == null) { from = context.getViewRoot(); } IJdbcConnectionManager manager = findConnectionManager(context, from, name); if (manager == null) { throw new FacesExceptionEx(null, StringUtil.format("Unknown {0} {1}", "ConnectionManager", name)); // $NLX-JdbcUtil.Unknown01-1$ // $NON-NLS-2$ } return manager.getConnection(); } /** * Find a connection manager by name * * @param context * @param name * @param shared * @return * @throws SQLException */ public static IJdbcConnectionManager findConnectionManager(FacesContext context, UIComponent from, String name) throws SQLException { UIComponent c = FacesUtil.getComponentFor(from, name); if (c != null) { return (IJdbcConnectionManager) c; } return null; } /** * Check if table had been created. */ public static boolean tableExists(Connection c, String schema) throws SQLException { return tableExists(c, schema, new String[] { "TABLE" }); // $NON-NLS-1$ } public static boolean tableExists(Connection c, String schema, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, null, types); //$NON-NLS-1$ try { if (tables.next()) { return true; } else { return false; } } finally { tables.close(); } } /** * Check if a table exists. */ public static boolean tableExists(Connection c, String schema, String tableName) throws SQLException { return tableExists(c, schema, tableName, new String[] { "TABLE" }); // $NON-NLS-1$ } public static boolean tableExists(Connection c, String schema, String tableName, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, tableName, types); //$NON-NLS-1$ try { if (tables.next()) { return true; } else { return false; } } finally { tables.close(); } } /** * Get the list of tables */ public static List<String> listTables(Connection c, String schema, String tableName) throws SQLException { return listTables(c, schema, tableName, new String[] { "TABLE" }); // $NON-NLS-1$ } public static List<String> listTables(Connection c, String schema, String tableName, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, tableName, types); //$NON-NLS-1$ try { ArrayList<String> l = new ArrayList<String>(); while (tables.next()) { String sc = tables.getString("TABLE_SCHEM"); // $NON-NLS-1$ String tb = tables.getString("TABLE_NAME"); // $NON-NLS-1$ if (StringUtil.isEmpty(sc)) { l.add(tb); } else { l.add(StringUtil.format("{0}.{1}", sc, tb)); } } return l; } finally { tables.close(); } } /** * Read a SQL file from the resources. 
*/ public static String readSqlFile(String fileName) { if (StringUtil.isNotEmpty(fileName)) { Application app = Application.get(); VFSObjectCache c = app.getVFSCache(); try { String fullPath = JDBC_ROOT + VFS.SEPARATOR + fileName; if (!fullPath.endsWith(".sql")) { // $NON-NLS-1$ fullPath = fullPath + ".sql"; // $NON-NLS-1$ } return (String) c.get(fullPath, new VFSObjectCache.ObjectLoader() { // $NON-NLS-1$ public Object loadObject(VFSFile file) throws VFSException { if (file.exists()) { try { String s = file.loadAsString(); return s; } catch (Exception ex) { throw new VFSException(ex, StringUtil.format("Error while reading {0} Query {1}", "SQL", file)); // $NLX-JdbcUtil.Errorwhilereading0Query1-1$ // $NON-NLS-2$ } } throw new VFSException(null, StringUtil.format("{0} file {1} does not exist", "SQL Query", file)); // $NLX-JdbcUtil.0file1doesnotexist-1$ // $NON-NLS-2$ } }); } catch (VFSException ex) { throw new FacesExceptionEx(ex, StringUtil.format("Error while loading {0} query file {1}", "SQL", fileName)); // $NLX-JdbcUtil.Errorwhileloading0queryfile1-1$ // $NON-NLS-2$ } } return null; } // ======================================================================== // SQL construction methods // ======================================================================== public static void appendTableName(StringBuilder b, String tbName) { b.append(tbName); } public static void appendColumnName(StringBuilder b, String colName) { appendColumnName(b, colName, true); } public static void appendColumnName(StringBuilder b, String colName, Boolean uCase) { if (uCase) { colName = colName.toUpperCase(); } b.append(colName); } // ======================================================================== // Count query // ======================================================================== public static String getCountQuery(String q) throws SQLException { // This method transforms a query into another query that actually // counts the number of entries: it replaces the selected columns // with a count(*). // The query must be of the form // SELECT xxxx FROM <whatever> // Note that the result might not be optimal in all cases. Also, the // replacement is currently done using basic string replacement, while // more robust code should actually parse the SQL. int sel = StringUtil.indexOfIgnoreCase(q, "select", 0); // $NON-NLS-1$ int from = StringUtil.indexOfIgnoreCase(q, "from", 0); // $NON-NLS-1$ if (sel < 0 || from < sel) { throw new SQLException(StringUtil.format("Unable to create a 'count' query for the {0} {1}", "SQL", q)); // $NLX-JdbcUtil.Unabletocreateacountqueryforthe01-1$ // $NON-NLS-2$ } return q.substring(0, sel + 6) + " count(*) " + q.substring(from); // $NON-NLS-1$ } }
extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.relational/src/com/ibm/xsp/extlib/relational/util/JdbcUtil.java
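As an aside on the count-query comment in getCountQuery() above, the following standalone sketch (not part of the plug-in; the class name CountQueryDemo and the table and column names are made up) shows what the string replacement produces for a typical query, assuming JdbcUtil is available on the classpath.

// Hypothetical demo of JdbcUtil.getCountQuery(); class and query contents are illustrative only.
import java.sql.SQLException;

import com.ibm.xsp.extlib.relational.util.JdbcUtil;

public class CountQueryDemo {
    public static void main(String[] args) throws SQLException {
        String query = "SELECT id, title FROM app.documents WHERE state = 'open' ORDER BY title";
        // Everything between SELECT and the first FROM is replaced by count(*);
        // the rest of the statement, including the ORDER BY, is kept as-is.
        String countQuery = JdbcUtil.getCountQuery(query);
        System.out.println(countQuery);
        // Prints: SELECT count(*) FROM app.documents WHERE state = 'open' ORDER BY title
    }
}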
/* * Copyright IBM Corp. 2010 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.ibm.xsp.extlib.relational.util; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import javax.faces.component.UIComponent; import javax.faces.context.FacesContext; import javax.naming.InitialContext; import javax.naming.NamingException; import javax.sql.DataSource; import com.ibm.commons.util.StringUtil; import com.ibm.commons.vfs.VFS; import com.ibm.commons.vfs.VFSException; import com.ibm.commons.vfs.VFSFile; import com.ibm.commons.vfs.VFSObjectCache; import com.ibm.designer.runtime.Application; import com.ibm.xsp.FacesExceptionEx; import com.ibm.xsp.extlib.relational.component.jdbc.IJdbcConnectionManager; import com.ibm.xsp.extlib.relational.jdbc.jndi.JndiRegistry; import com.ibm.xsp.util.FacesUtil; public class JdbcUtil { public static String JDBC_ROOT = "/WEB-INF/jdbc"; // $NON-NLS-1$ // ======================================================================== // Access to JDBC Connections // ======================================================================== /** * Create a JDBC connection from a URL. The connection is actually created * and added to a FacesContextListener */ public static Connection createConnectionFromUrl(FacesContext context, String connectionUrl) throws SQLException { if (StringUtil.isNotEmpty(connectionUrl)) { return DriverManager.getConnection(connectionUrl); } return null; } /** * Create a JDBC connection. The connection is actually created and added to * a FacesContextListener */ public static Connection getConnection(FacesContext context, String name) throws SQLException { return createNamedConnection(context, name); } /** * Create a JDBC connection. The connection is actually created and added to * a FacesContextListener */ public static Connection createNamedConnection(FacesContext context, String name) throws SQLException { try { String jndiName = name; if (!jndiName.startsWith("java:")) { // $NON-NLS-1$ jndiName = JndiRegistry.getJNDIBindName(name); } InitialContext ctx = new InitialContext(); DataSource ds = (DataSource) ctx.lookup(jndiName); if (ds != null) { return ds.getConnection(); } return null; } catch (NamingException ex) { throw (SQLException) new SQLException().initCause(ex); } } /** * Get a connection from a connection manager. 
* * @param context * @param name * @param shared * @return * @throws SQLException */ public static Connection createManagedConnection(FacesContext context, UIComponent from, String name) throws SQLException { if (from == null) { from = context.getViewRoot(); } IJdbcConnectionManager manager = findConnectionManager(context, from, name); if (manager == null) { throw new FacesExceptionEx(null, StringUtil.format("Unknown {0} {1}", "ConnectionManager", name)); // $NLX-JdbcUtil.Unknown01-1$ // $NON-NLS-2$ } return manager.getConnection(); } /** * Find a connection manager by name * * @param context * @param name * @param shared * @return * @throws SQLException */ public static IJdbcConnectionManager findConnectionManager(FacesContext context, UIComponent from, String name) throws SQLException { UIComponent c = FacesUtil.getComponentFor(from, name); if (c != null) { return (IJdbcConnectionManager) c; } return null; } /** * Check if table had been created. */ public static boolean tableExists(Connection c, String schema) throws SQLException { return tableExists(c, schema, new String[] { "TABLE" }); // $NON-NLS-1$ } public static boolean tableExists(Connection c, String schema, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, null, types); //$NON-NLS-1$ try { if (tables.next()) { return true; } else { return false; } } finally { tables.close(); } } /** * Check if a table exists. */ public static boolean tableExists(Connection c, String schema, String tableName) throws SQLException { return tableExists(c, schema, tableName, new String[] { "TABLE" }); // $NON-NLS-1$ } public static boolean tableExists(Connection c, String schema, String tableName, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, tableName, types); //$NON-NLS-1$ try { if (tables.next()) { return true; } else { return false; } } finally { tables.close(); } } /** * Get the list of tables */ public static List<String> listTables(Connection c, String schema, String tableName) throws SQLException { return listTables(c, schema, tableName, new String[] { "TABLE" }); // $NON-NLS-1$ } public static List<String> listTables(Connection c, String schema, String tableName, String[] types) throws SQLException { ResultSet tables = c.getMetaData().getTables(null, schema, tableName, types); //$NON-NLS-1$ try { ArrayList<String> l = new ArrayList<String>(); while (tables.next()) { String sc = tables.getString("TABLE_SCHEM"); // $NON-NLS-1$ String tb = tables.getString("TABLE_NAME"); // $NON-NLS-1$ if (StringUtil.isEmpty(sc)) { l.add(tb); } else { l.add(StringUtil.format("{0}.{1}", sc, tb)); } } return l; } finally { tables.close(); } } /** * Read a SQL file from the resources. 
*/ public static String readSqlFile(String fileName) { if (StringUtil.isNotEmpty(fileName)) { Application app = Application.get(); VFSObjectCache c = app.getVFSCache(); try { String fullPath = JDBC_ROOT + VFS.SEPARATOR + fileName; if (!fullPath.endsWith(".sql")) { // $NON-NLS-1$ fullPath = fullPath + ".sql"; // $NON-NLS-1$ } return (String) c.get(fullPath, new VFSObjectCache.ObjectLoader() { // $NON-NLS-1$ public Object loadObject(VFSFile file) throws VFSException { if (file.exists()) { try { String s = file.loadAsString(); return s; } catch (Exception ex) { throw new VFSException(ex, StringUtil.format("Error while reading {0} Query {1}", "SQL", file)); // $NLX-JdbcUtil.Errorwhilereading0Query1-1$ // $NON-NLS-2$ } } throw new VFSException(null, StringUtil.format("{0) file {1} does not exist", "SQL Query", file)); // $NLX-JdbcUtil.0file1doesnotexist-1$ // $NON-NLS-2$ } }); } catch (VFSException ex) { throw new FacesExceptionEx(ex, StringUtil.format("Error while loading {0} query file {1}", "SQL", fileName)); // $NLX-JdbcUtil.Errorwhileloading0queryfile1-1$ // $NON-NLS-2$ } } return null; } // ======================================================================== // SQL construction methods // ======================================================================== public static void appendTableName(StringBuilder b, String tbName) { b.append(tbName); } public static void appendColumnName(StringBuilder b, String colName) { appendColumnName(b, colName, true); } public static void appendColumnName(StringBuilder b, String colName, Boolean uCase) { if (uCase) { colName = colName.toUpperCase(); } b.append(colName); } // ======================================================================== // Count query // ======================================================================== <<<<<<< HEAD public static void appendColumnName(StringBuilder b, String colName) { appendColumnName(b, colName, true); } public static void appendColumnName(StringBuilder b, String colName, Boolean UCase) { if (UCase) { colName = colName.toUpperCase(); } b.append(colName); } // ======================================================================== // Count query // ======================================================================== public static String getCountQuery(String q) throws SQLException { // This function transforms a query into another query that actually counts the number // of entry. It actually replaced the selection of the columns by a count(*) // The query must be of the form // SELECT xxxx FROM <whatever> // Note that it might not be optimal is all the cases. Also, the replacement is currently // done using basic string replacement, while a more robust code should actually fully // parse the SQL. int sel = StringUtil.indexOfIgnoreCase(q, "select", 0); // $NON-NLS-1$ int from = StringUtil.indexOfIgnoreCase(q, "from", 0); // $NON-NLS-1$ if(sel<0 || from<sel) { throw new SQLException(StringUtil.format("Unable to create a 'count' query for the {0} {1}", "SQL", q)); // $NLX-JdbcUtil.Unabletocreateacountqueryforthe01-1$ $NON-NLS-2$ } return q.substring(0,sel+6)+" count(*) "+q.substring(from); // $NON-NLS-1$ } ======= public static String getCountQuery(String q) throws SQLException { // This function transforms a query into another query that actually // counts the number // of entry. It actually replaced the selection of the columns by a // count(*) // The query must be of the form // SELECT xxxx FROM <whatever> // Note that it might not be optimal is all the cases. 
Also, the // replacement is currently // done using basic string replacement, while a more robust code should // actually fully // parse the SQL. int sel = StringUtil.indexOfIgnoreCase(q, "select", 0); // $NON-NLS-1$ int from = StringUtil.indexOfIgnoreCase(q, "from", 0); // $NON-NLS-1$ if (sel < 0 || from < sel) { throw new SQLException(StringUtil.format("Unable to create a 'count' query for the {0} {1}", "SQL", q)); // $NLX-JdbcUtil.Unabletocreateacountqueryforthe01-1$ // $NON-NLS-2$ } return q.substring(0, sel + 6) + " count(*) " + q.substring(from); // $NON-NLS-1$ } >>>>>>> wpca }
Cleaning up appendColumnValue
extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.relational/src/com/ibm/xsp/extlib/relational/util/JdbcUtil.java
Cleaning up appendColumnValue
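To illustrate how the schema helpers in JdbcUtil are typically combined, here is a hedged check-then-create sketch. The class name SchemaBootstrap, the schema name XSP, the table name USERS, and the SQL file name create_users are all hypothetical; the Connection must be supplied by the caller, and readSqlFile() additionally requires a running XPages application because it resolves the file through Application.get().

// Illustrative sketch only: create a table from a /WEB-INF/jdbc SQL resource when it is missing.
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import com.ibm.xsp.extlib.relational.util.JdbcUtil;

public class SchemaBootstrap {
    public static void ensureUserTable(Connection connection) throws SQLException {
        // tableExists() checks DatabaseMetaData#getTables() for a matching table.
        if (!JdbcUtil.tableExists(connection, "XSP", "USERS")) {
            // readSqlFile() loads /WEB-INF/jdbc/create_users.sql (the .sql suffix is appended if missing).
            String ddl = JdbcUtil.readSqlFile("create_users");
            Statement statement = connection.createStatement();
            try {
                statement.execute(ddl);
            } finally {
                statement.close();
            }
        }
    }
}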
Java
apache-2.0
270da1246f488f7a4d286be2c41e38b07ce1512f
0
dswarm/Wikidata-Toolkit,dswarm/Wikidata-Toolkit,monkey2000/Wikidata-Toolkit,noa/Wikidata-Toolkit,noa/Wikidata-Toolkit,monkey2000/Wikidata-Toolkit,Wikidata/Wikidata-Toolkit,noa/Wikidata-Toolkit,zazi/Wikidata-Toolkit,notconfusing/Wikidata-Toolkit,zazi/Wikidata-Toolkit,noa/Wikidata-Toolkit,notconfusing/Wikidata-Toolkit,Wikidata/Wikidata-Toolkit
package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.dumpfiles.EntityDocumentProcessor; import org.wikidata.wdtk.dumpfiles.MwDumpFileProcessor; import org.wikidata.wdtk.dumpfiles.MwDumpFileProcessorImpl; import org.wikidata.wdtk.dumpfiles.MwRevision; import org.wikidata.wdtk.dumpfiles.MwRevisionProcessor; import org.wikidata.wdtk.dumpfiles.MwRevisionProcessorBroker; import org.wikidata.wdtk.dumpfiles.StatisticsMwRevisionProcessor; import org.wikidata.wdtk.dumpfiles.WikibaseRevisionProcessor; import org.wikidata.wdtk.dumpfiles.WmfDumpFileManager; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.DirectoryManagerImpl; import org.wikidata.wdtk.util.WebResourceFetcher; import org.wikidata.wdtk.util.WebResourceFetcherImpl; /** * This class demonstrates how to write an application that downloads and * processes dumpfiles from Wikidata.org. It shows a rather pedestrian setup of * the whole processing pipeline. Much of this code will be the same or very * similar for other processing tasks, with only the last component changed. * * @author Markus Kroetzsch * */ public class DumpProcessingExample { public static void main(String[] args) throws IOException { // Define where log messages go configureLogging(); // Print information about this program printDocumentation(); // Create object to get hold of Wikidata.org dumpfiles WmfDumpFileManager dumpFileManager = createDumpFileManager(); // Set up processing pipeline MwDumpFileProcessor dumpFileProcessor = createDumpFileProcessor(); // Start processing (may trigger downloads where needed) dumpFileManager.processAllRecentDumps(dumpFileProcessor, true); } /** * Creates an object that manages dumpfiles published by the Wikimedia * Foundation. This object will check for available complete and incremental * dump files, both online and in a local download directory. It provides * direct access to the (decompressed) string content of these files. * <p> * The details in this method define which download directory is to be used, * which Wikimedia project we are interested in (Wikidata), and that we want * to allow online access (instead of using local files only). 
* * @return dump file manager * @throws IOException * if the download directory is not accessible */ private static WmfDumpFileManager createDumpFileManager() throws IOException { // The following can also be set to another directory: String downloadDirectory = System.getProperty("user.dir"); DirectoryManager downloadDirectoryManager = new DirectoryManagerImpl( downloadDirectory); // The following can be set to null for offline operation: WebResourceFetcher webResourceFetcher = new WebResourceFetcherImpl(); // The string "wikidatawiki" identifies Wikidata.org: return new WmfDumpFileManager("wikidatawiki", downloadDirectoryManager, webResourceFetcher); } /** * Create an object that handles the complete processing of MediaWiki * dumpfiles. This processing consists of the following main steps: * * <pre> * XML dump file -> page revisions -> item documents * </pre> * * The objects handling each step are of type {@link MwDumpFileProcessor}, * {@link MwRevisionProcessor}, and {@link EntityDocumentProcessor}. In each * case, the object on the left calls the object on the right whenever new * data is available. Therefore, the object on the right must be known to * the object on the left, so we set up the objects in reverse order. * <p> * Normally, there is exactly one processor of each type. In the code below, * we want to use two different objects to process revisions (one to analyse * Wikidata item information and one to gather basic statistics about all * revisions). To do this, we use a broker class that processes revisions to * distribute them further to any number of revision processors. * * @return dump file processor */ private static MwDumpFileProcessor createDumpFileProcessor() { // Our local example class ItemStatisticsProcessor counts the number of // labels etc. in Wikibase item documents to print out some statistics. // It is the last part of the processing chain. EntityDocumentProcessor edpItemStats = new ItemStatisticsProcessor(); // Revision processor for extracting entity documents from revisions: // (the documents are sent to our example document processor) MwRevisionProcessor rpItemStats = new WikibaseRevisionProcessor( edpItemStats); // Revision processor for general statistics and time keeping: MwRevisionProcessor rpRevisionStats = new StatisticsMwRevisionProcessor( "revision processing statistics", 10000); // Broker to distribute revisions to multiple subscribers: MwRevisionProcessorBroker rpBroker = new MwRevisionProcessorBroker(); // Subscribe to the most recent revisions of type wikibase item: rpBroker.registerMwRevisionProcessor(rpItemStats, MwRevision.MODEL_WIKIBASE_ITEM, true); // Subscribe to all current revisions (null = no filter): rpBroker.registerMwRevisionProcessor(rpRevisionStats, null, true); // Object to parse XML dumps to send page revisions to our broker: return new MwDumpFileProcessorImpl(rpBroker); } /** * Defines how messages should be logged. This method can be modified to * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. */ private static void configureLogging() { // Create the appender that will write log messages to the console. ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. 
String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.INFO); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); } /** * Print some basic documentation about this program. */ private static void printDocumentation() { System.out .println("********************************************************************"); System.out.println("*** Wikidata Toolkit: Dump Processing Example"); System.out.println("*** "); System.out .println("*** This program will download and process dumps from Wikidata."); System.out .println("*** It will print progress information and some simple statistics."); System.out .println("*** Downloading may take some time initially. After that, files"); System.out .println("*** are stored on disk and are used until newer dumps are available."); System.out .println("*** You can delete files manually when no longer needed (see "); System.out .println("*** message below for the directory where files are found)."); System.out .println("********************************************************************"); } /** * A simple example class that processes EntityDocuments to compute basic * statistics that are printed to the standard output. This could be * replaced with any other class that processes entity documents in some * way. * * @author Markus Kroetzsch * */ static class ItemStatisticsProcessor implements EntityDocumentProcessor { long countItems = 0; long countLabels = 0; long countDescriptions = 0; long countAliases = 0; long countStatements = 0; long countSiteLinks = 0; @Override public void processItemDocument(ItemDocument itemDocument) { this.countItems++; this.countLabels += itemDocument.getLabels().size(); this.countDescriptions += itemDocument.getDescriptions().size(); for (String languageKey : itemDocument.getAliases().keySet()) { this.countAliases += itemDocument.getAliases().get(languageKey) .size(); } for (StatementGroup sg : itemDocument.getStatementGroups()) { this.countStatements += sg.getStatements().size(); } this.countSiteLinks += itemDocument.getSiteLinks().size(); // print a report every 10000 items: if (this.countItems % 10000 == 0) { printReport(); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { // ignore properties } @Override public void finishProcessingEntityDocuments() { printReport(); // print a final report } /** * Prints a report about the statistics gathered so far. */ private void printReport() { System.out.println("Processed " + this.countItems + " items:"); System.out.println(" * Labels: " + this.countLabels); System.out.println(" * Descriptions: " + this.countDescriptions); System.out.println(" * Aliases: " + this.countAliases); System.out.println(" * Statements: " + this.countStatements); System.out.println(" * Site links: " + this.countSiteLinks); } } }
wdtk-examples/src/main/java/org/wikidata/wdtk/examples/DumpProcessingExample.java
package org.wikidata.wdtk.examples; /* * #%L * Wikidata Toolkit Examples * %% * Copyright (C) 2014 Wikidata Toolkit Developers * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.io.IOException; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.wikidata.wdtk.datamodel.interfaces.ItemDocument; import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument; import org.wikidata.wdtk.datamodel.interfaces.StatementGroup; import org.wikidata.wdtk.dumpfiles.EntityDocumentProcessor; import org.wikidata.wdtk.dumpfiles.MwDumpFileProcessor; import org.wikidata.wdtk.dumpfiles.MwDumpFileProcessorImpl; import org.wikidata.wdtk.dumpfiles.MwRevision; import org.wikidata.wdtk.dumpfiles.MwRevisionProcessor; import org.wikidata.wdtk.dumpfiles.MwRevisionProcessorBroker; import org.wikidata.wdtk.dumpfiles.StatisticsMwRevisionProcessor; import org.wikidata.wdtk.dumpfiles.WikibaseRevisionProcessor; import org.wikidata.wdtk.dumpfiles.WmfDumpFileManager; import org.wikidata.wdtk.util.DirectoryManager; import org.wikidata.wdtk.util.DirectoryManagerImpl; import org.wikidata.wdtk.util.WebResourceFetcher; import org.wikidata.wdtk.util.WebResourceFetcherImpl; /** * This class demonstrates how to write an application that downloads and * processes dumpfiles from Wikidata.org. It shows a rather pedestrian setup of * the whole processing pipeline. Much of this code will be the same or very * similar for other processing tasks, with only the last component changed. * * @author Markus Kroetzsch * */ public class DumpProcessingExample { public static void main(String[] args) throws IOException { // Define where log messages go configureLogging(); // Create object to get hold of Wikidata.org dumpfiles WmfDumpFileManager dumpFileManager = createDumpFileManager(); // Set up processing pipeline MwDumpFileProcessor dumpFileProcessor = createDumpFileProcessor(); // Start processing (may trigger downloads where needed) dumpFileManager.processAllRecentDumps(dumpFileProcessor, true); } /** * Creates an object that manages dumpfiles published by the Wikimedia * Foundation. This object will check for available complete and incremental * dump files, both online and in a local download directory. It provides * direct access to the (decompressed) string content of these files. * <p> * The details in this method define which download directory is to be used, * which Wikimedia project we are interested in (Wikidata), and that we want * to allow online access (instead of using local files only). 
* * @return dump file manager * @throws IOException * if the download directory is not accessible */ private static WmfDumpFileManager createDumpFileManager() throws IOException { // The following can also be set to another directory: String downloadDirectory = System.getProperty("user.dir"); DirectoryManager downloadDirectoryManager = new DirectoryManagerImpl( downloadDirectory); // The following can be set to null for offline operation: WebResourceFetcher webResourceFetcher = new WebResourceFetcherImpl(); // The string "wikidatawiki" identifies Wikidata.org: return new WmfDumpFileManager("wikidatawiki", downloadDirectoryManager, webResourceFetcher); } /** * Create an object that handles the complete processing of MediaWiki * dumpfiles. This processing consists of the following main steps: * * <pre> * XML dump file -> page revisions -> item documents * </pre> * * The objects handling each step are of type {@link MwDumpFileProcessor}, * {@link MwRevisionProcessor}, and {@link EntityDocumentProcessor}. In each * case, the object on the left calls the object on the right whenever new * data is available. Therefore, the object on the right must be known to * the object on the left, so we set up the objects in reverse order. * <p> * Normally, there is exactly one processor of each type. In the code below, * we want to use two different objects to process revisions (one to analyse * Wikidata item information and one to gather basic statistics about all * revisions). To do this, we use a broker class that processes revisions to * distribute them further to any number of revision processors. * * @return dump file processor */ private static MwDumpFileProcessor createDumpFileProcessor() { // Our local example class ItemStatisticsProcessor counts the number of // labels etc. in Wikibase item documents to print out some statistics. // It is the last part of the processing chain. EntityDocumentProcessor edpItemStats = new ItemStatisticsProcessor(); // Revision processor for extracting entity documents from revisions: // (the documents are sent to our example document processor) MwRevisionProcessor rpItemStats = new WikibaseRevisionProcessor( edpItemStats); // Revision processor for general statistics and time keeping: MwRevisionProcessor rpRevisionStats = new StatisticsMwRevisionProcessor( "revision processing statistics", 10000); // Broker to distribute revisions to multiple subscribers: MwRevisionProcessorBroker rpBroker = new MwRevisionProcessorBroker(); // Subscribe to the most recent revisions of type wikibase item: rpBroker.registerMwRevisionProcessor(rpItemStats, MwRevision.MODEL_WIKIBASE_ITEM, true); // Subscribe to all current revisions (null = no filter): rpBroker.registerMwRevisionProcessor(rpRevisionStats, null, true); // Object to parse XML dumps to send page revisions to our broker: return new MwDumpFileProcessorImpl(rpBroker); } /** * Defines how messages should be logged. This method can be modified to * restrict the logging messages that are shown on the console or to change * their formatting. See the documentation of Log4J for details on how to do * this. */ private static void configureLogging() { // Create the appender that will write log messages to the console. ConsoleAppender consoleAppender = new ConsoleAppender(); // Define the pattern of log messages. // Insert the string "%c{1}:%L" to also show class name and line. 
String pattern = "%d{yyyy-MM-dd HH:mm:ss} %-5p - %m%n"; consoleAppender.setLayout(new PatternLayout(pattern)); // Change to Level.ERROR for fewer messages: consoleAppender.setThreshold(Level.INFO); consoleAppender.activateOptions(); Logger.getRootLogger().addAppender(consoleAppender); } /** * A simple example class that processes EntityDocuments to compute basic * statistics that are printed to the standard output. This could be * replaced with any other class that processes entity documents in some * way. * * @author Markus Kroetzsch * */ static class ItemStatisticsProcessor implements EntityDocumentProcessor { long countItems = 0; long countLabels = 0; long countDescriptions = 0; long countAliases = 0; long countStatements = 0; long countSiteLinks = 0; @Override public void processItemDocument(ItemDocument itemDocument) { this.countItems++; this.countLabels += itemDocument.getLabels().size(); this.countDescriptions += itemDocument.getDescriptions().size(); for (String languageKey : itemDocument.getAliases().keySet()) { this.countAliases += itemDocument.getAliases().get(languageKey) .size(); } for (StatementGroup sg : itemDocument.getStatementGroups()) { this.countStatements += sg.getStatements().size(); } this.countSiteLinks += itemDocument.getSiteLinks().size(); // print a report every 10000 items: if (this.countItems % 10000 == 0) { printReport(); } } @Override public void processPropertyDocument(PropertyDocument propertyDocument) { // ignore properties } @Override public void finishProcessingEntityDocuments() { printReport(); // print a final report } /** * Prints a report about the statistics gathered so far. */ private void printReport() { System.out.println("Processed " + this.countItems + " items:"); System.out.println(" * Labels: " + this.countLabels); System.out.println(" * Descriptions: " + this.countDescriptions); System.out.println(" * Aliases: " + this.countAliases); System.out.println(" * Statements: " + this.countStatements); System.out.println(" * Site links: " + this.countSiteLinks); } } }
Print documentation when started
wdtk-examples/src/main/java/org/wikidata/wdtk/examples/DumpProcessingExample.java
Print documentation when started
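The Javadoc of createDumpFileProcessor() above points out that only the last pipeline component, the EntityDocumentProcessor, changes between applications. As a minimal sketch of such a replacement, the hypothetical class below relies only on the interface methods and ItemDocument accessors already used by ItemStatisticsProcessor in the example.

// Hypothetical alternative last pipeline component: counts items lacking labels or descriptions.
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.dumpfiles.EntityDocumentProcessor;

public class MissingLabelCounter implements EntityDocumentProcessor {

    long itemsWithoutLabels = 0;
    long itemsWithoutDescriptions = 0;

    @Override
    public void processItemDocument(ItemDocument itemDocument) {
        if (itemDocument.getLabels().size() == 0) {
            this.itemsWithoutLabels++;
        }
        if (itemDocument.getDescriptions().size() == 0) {
            this.itemsWithoutDescriptions++;
        }
    }

    @Override
    public void processPropertyDocument(PropertyDocument propertyDocument) {
        // properties are ignored, as in the example above
    }

    @Override
    public void finishProcessingEntityDocuments() {
        System.out.println("Items without labels: " + this.itemsWithoutLabels);
        System.out.println("Items without descriptions: " + this.itemsWithoutDescriptions);
    }
}

Wiring this in would only mean passing an instance to new WikibaseRevisionProcessor(...) inside createDumpFileProcessor() instead of ItemStatisticsProcessor; the rest of the pipeline stays unchanged.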
Java
apache-2.0
fad0cbf1ee51a188ea68ddcda570eafe0dcffb53
0
fpadoan/metasyntactic,alokc83/metasyntactic,fpadoan/metasyntactic,nguyenphanhuynh/metasyntactic,alokc83/metasyntactic,fpadoan/metasyntactic,nguyenphanhuynh/metasyntactic,fpadoan/metasyntactic,alokc83/metasyntactic,nguyenphanhuynh/metasyntactic,alokc83/metasyntactic,alokc83/metasyntactic,fpadoan/metasyntactic,fpadoan/metasyntactic,nguyenphanhuynh/metasyntactic,fpadoan/metasyntactic,nguyenphanhuynh/metasyntactic,alokc83/metasyntactic,alokc83/metasyntactic,nguyenphanhuynh/metasyntactic,nguyenphanhuynh/metasyntactic,alokc83/metasyntactic
package org.metasyntactic.activities; import static org.apache.commons.collections.CollectionUtils.isEmpty; import static org.metasyntactic.utilities.StringUtilities.isNullOrEmpty; import java.io.File; import java.lang.ref.SoftReference; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import org.metasyntactic.INowPlaying; import org.metasyntactic.NowPlayingApplication; import org.metasyntactic.NowPlayingControllerWrapper; import org.metasyntactic.UserTask; import org.metasyntactic.data.Movie; import org.metasyntactic.data.Score; import org.metasyntactic.providers.DataProvider; import org.metasyntactic.utilities.FileUtilities; import org.metasyntactic.utilities.LogUtilities; import org.metasyntactic.utilities.MovieViewUtilities; import org.metasyntactic.utilities.StringUtilities; import org.metasyntactic.views.CustomGridView; import org.metasyntactic.views.FastScrollGridView; import org.metasyntactic.views.NowPlayingPreferenceDialog; import org.metasyntactic.views.Rotate3dAnimation; import android.app.Activity; import android.app.AlertDialog; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.os.Bundle; import android.os.Parcelable; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.View.OnClickListener; import android.view.animation.Animation; import android.view.animation.Animation.AnimationListener; import android.widget.AdapterView; import android.widget.BaseAdapter; import android.widget.Button; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import android.widget.AdapterView.OnItemClickListener; public class NowPlayingActivity extends Activity implements INowPlaying { private CustomGridView grid; private Intent intent; private Movie selectedMovie; private PostersAdapter postersAdapter; private boolean isGridSetup; private List<Movie> movies; private int lastPosition; private String search; private final Map<Integer, Integer> alphaMovieSectionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> alphaMoviePositionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> scoreMovieSectionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> scoreMoviePositionsMap = new HashMap<Integer, Integer>(); private static final Map<String, SoftReference<Bitmap>> postersMap = new HashMap<String, SoftReference<Bitmap>>(); private String[] alphabet; private String[] score; private TextView progressUpdate; private RelativeLayout bottomBar; /* This task is controlled by the TaskManager based on the scrolling state */ private UserTask<?, ?, ?> mTask; private final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { refresh(); } }; private final BroadcastReceiver progressBroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { 
progressUpdate.setText(intent.getStringExtra("message")); } }; private final BroadcastReceiver dataBroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { // the data provider finished downloading. set up our view accordingly. setupView(); } }; private final BroadcastReceiver scrollStatebroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { if (NowPlayingApplication.NOT_SCROLLING_INTENT.equals(intent.getAction()) && mTask != null && mTask.getStatus() != UserTask.Status.RUNNING) { mTask = new LoadPostersTask().execute(null); } if (NowPlayingApplication.SCROLLING_INTENT.equals(intent.getAction()) && mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } }; private void showNoInformationFoundDialog() { new AlertDialog.Builder(this).setMessage(R.string.no_information) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int whichButton) { } }).show(); } private void setupView() { // we're currently in 'downloading' mode. We need to deal with a few // cases. First, we deal with the case where a user has returned to // this activity, and now there are movies available. In that case, we // just display them. refresh(); if (isEmpty(movies)) { // Ok. so we have no movies. THat means one of two things. Either // we're trying to download the movies, or we tried and failed to // download them. In the former case just wait. We'll get a // notification when they're done. In the latter case, let the user // know. if (!isNullOrEmpty(NowPlayingControllerWrapper.getUserLocation()) && NowPlayingControllerWrapper.getDataProviderState() == DataProvider.State.Finished) { showNoInformationFoundDialog(); } } else { setupMovieGrid(); } } @Override protected void onResume() { super.onResume(); LogUtilities.i(getClass().getSimpleName(), "onResume"); if (FileUtilities.isSDCardAccessible()) { registerReceiver(broadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_CHANGED_INTENT)); registerReceiver(dataBroadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOADED)); registerReceiver(scrollStatebroadcastReceiver, new IntentFilter( NowPlayingApplication.SCROLLING_INTENT)); registerReceiver(scrollStatebroadcastReceiver, new IntentFilter( NowPlayingApplication.NOT_SCROLLING_INTENT)); registerReceiver(progressBroadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOAD_PROGRESS)); if (isGridSetup) { grid.setVisibility(View.VISIBLE); postersAdapter.refreshMovies(); } else { setupView(); } } } @Override protected void onPause() { LogUtilities.i(getClass().getSimpleName(), "onPause"); if (FileUtilities.isSDCardAccessible()) { unregisterReceiver(broadcastReceiver); unregisterReceiver(dataBroadcastReceiver); unregisterReceiver(scrollStatebroadcastReceiver); unregisterReceiver(progressBroadcastReceiver); if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } super.onPause(); } @Override protected void onDestroy() { LogUtilities.i(getClass().getSimpleName(), "onDestroy"); if (FileUtilities.isSDCardAccessible()) { NowPlayingControllerWrapper.removeActivity(this); if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } clearBitmaps(); alphaMovieSectionsMap.clear(); 
alphaMoviePositionsMap.clear(); scoreMovieSectionsMap.clear(); scoreMoviePositionsMap.clear(); } super.onDestroy(); } @Override public Object onRetainNonConfigurationInstance() { LogUtilities.i(getClass().getSimpleName(), "onRetainNonConfigurationInstance"); final Object result = search; NowPlayingControllerWrapper.onRetainNonConfigurationInstance(this, result); return result; } /** * Updates display of the list of movies. */ public void refresh() { if (search == null) { movies = new ArrayList<Movie>(NowPlayingControllerWrapper.getMovies()); } // sort movies according to the default sort preference. final Comparator<Movie> comparator = MOVIE_ORDER.get(NowPlayingControllerWrapper .getAllMoviesSelectedSortIndex()); Collections.sort(movies, comparator); if (postersAdapter != null) { populateAlphaMovieSectionsAndPositions(); populateScoreMovieSectionsAndPositions(); FastScrollGridView.getSections(); postersAdapter.refreshMovies(); } // cancel task so that it doesnt try to load old set of movies if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } mTask = new LoadPostersTask().execute(null); } private List<Movie> getMatchingMoviesList(final String search2) { final String localSearch = search2.toLowerCase(); final List<Movie> matchingMovies = new ArrayList<Movie>(); for (final Movie movie : movies) { if (movie.getDisplayTitle().toLowerCase().contains(localSearch)) { matchingMovies.add(movie); } } return matchingMovies; } public Context getContext() { return this; } @Override public void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); LogUtilities.i(getClass().getSimpleName(), "onCreate"); search = (String) getLastNonConfigurationInstance(); // check for sdcard mounted properly if (FileUtilities.isSDCardAccessible()) { // Request the progress bar to be shown in the title requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.progressbar_1); progressUpdate = (TextView) findViewById(R.id.progress_update); NowPlayingControllerWrapper.addActivity(this); getUserLocation(); refresh(); } else { new AlertDialog.Builder(this).setTitle(R.string.insert_sdcard).setPositiveButton( android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int whichButton) { finish(); } }).show(); } } @Override protected void onNewIntent(final Intent intent) { super.onNewIntent(intent); search = intent.getStringExtra("movie"); if (search != null) { bottomBar.setVisibility(View.VISIBLE); } getSearchResults(); refresh(); } private void getUserLocation() { final String userLocation = NowPlayingControllerWrapper.getUserLocation(); if (StringUtilities.isNullOrEmpty(userLocation)) { final Intent localIntent = new Intent(); localIntent.setClass(this, SettingsActivity.class); startActivity(localIntent); } } private void getSearchResults() { if (search != null) { final List<Movie> matchingMovies = getMatchingMoviesList(search); if (isEmpty(matchingMovies)) { Toast.makeText(this, getResources().getString(R.string.no_results_found_for) + search, Toast.LENGTH_SHORT).show(); } else { movies = matchingMovies; // cancel task so that it doesnt try to load the complete set of movies. 
if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } } } private static void clearBitmaps() { for (final SoftReference<Bitmap> reference : postersMap.values()) { final Bitmap drawable = reference.get(); if (drawable != null) { reference.clear(); } } } private void getAlphabet(final Context context) { final String alphabetString = context.getResources().getString(R.string.alphabet); alphabet = new String[alphabetString.length()]; for (int i = 0; i < alphabet.length; i++) { alphabet[i] = String.valueOf(alphabetString.charAt(i)); } } private void getScores() { score = new String[11]; for (int index = 0, i = 100; i >= 0; index++, i -= 10) { score[index] = i + "%"; } } private void setupMovieGrid() { if (isGridSetup) { return; } isGridSetup = true; getAlphabet(this); getScores(); setContentView(R.layout.moviegrid_anim); bottomBar = (RelativeLayout) findViewById(R.id.bottom_bar); if (search == null) { bottomBar.setVisibility(View.GONE); } final Button allMovies = (Button) findViewById(R.id.all_movies); allMovies.setOnClickListener(new OnClickListener() { public void onClick(final View arg0) { final Intent intent = new Intent().setClass(NowPlayingActivity.this, NowPlayingActivity.class); startActivity(intent); } }); grid = (CustomGridView) findViewById(R.id.grid); grid.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(final AdapterView parent, final View view, final int position, final long id) { selectedMovie = movies.get(position); setupRotationAnimation(view); } }); populateAlphaMovieSectionsAndPositions(); populateScoreMovieSectionsAndPositions(); postersAdapter = new PostersAdapter(); grid.setAdapter(postersAdapter); intent = new Intent().setClass(this, MovieDetailsActivity.class); } private void populateAlphaMovieSectionsAndPositions() { int i = 0; String prevLetter = null; final List<String> alphabets = Arrays.asList(alphabet); for (final Movie movie : movies) { final String firstLetter = movie.getDisplayTitle().substring(0, 1); alphaMovieSectionsMap.put(i, alphabets.indexOf(firstLetter)); if (!firstLetter.equals(prevLetter)) { alphaMoviePositionsMap.put(alphabets.indexOf(firstLetter), i); } prevLetter = firstLetter; i++; } for (i = 0; i < alphabets.size(); i++) { if (alphaMoviePositionsMap.get(i) == null) { if (i == 0) { alphaMoviePositionsMap.put(0, 0); } else { alphaMoviePositionsMap.put(i, alphaMoviePositionsMap.get(i - 1)); } } } } private void populateScoreMovieSectionsAndPositions() { int i = 0; int prevLevel = 0; final List<String> scores = Arrays.asList(score); for (final Movie movie : movies) { final Score localScore = NowPlayingControllerWrapper.getScore(movie); final int scoreValue = localScore == null ? 
0 : localScore.getScoreValue(); final int scoreLevel = scoreValue / 10 * 10; scoreMovieSectionsMap.put(i, scores.indexOf(scoreLevel + "%")); if (scoreLevel != prevLevel) { scoreMoviePositionsMap.put(scores.indexOf(scoreLevel + "%"), i); } prevLevel = scoreLevel; i++; } for (i = 0; i < scores.size(); i++) { if (scoreMoviePositionsMap.get(i) == null) { if (i == 0) { scoreMoviePositionsMap.put(0, 0); } else { scoreMoviePositionsMap.put(i, scoreMoviePositionsMap.get(i - 1)); } } } } public final static List<Comparator<Movie>> MOVIE_ORDER = Arrays.asList(Movie.TITLE_ORDER, Movie.RELEASE_ORDER, Movie.SCORE_ORDER); private class PostersAdapter extends BaseAdapter implements FastScrollGridView.SectionIndexer { private final LayoutInflater inflater; private PostersAdapter() { // Cache the LayoutInflate to avoid asking for a new one each time. inflater = LayoutInflater.from(NowPlayingActivity.this); } public View getView(final int position, View convertView, final ViewGroup parent) { // to findViewById() on each row. final ViewHolder holder; // When convertView is not null, we can reuse it directly, there is // no need to reinflate it. We only inflate a new View when the // convertView // supplied by GridView is null. if (convertView == null) { convertView = inflater.inflate(R.layout.moviegrid_item, null); // Creates a ViewHolder and store references to the two children // views we want to bind data to. holder = new ViewHolder((TextView) convertView.findViewById(R.id.title), (ImageView) convertView.findViewById(R.id.poster)); convertView.setTag(holder); } else { // Get the ViewHolder back to get fast access to the TextView // and the ImageView. holder = (ViewHolder) convertView.getTag(); } final Movie movie = movies.get(position % movies.size()); NowPlayingControllerWrapper.prioritizeMovie(movie); holder.title.setText(movie.getDisplayTitle()); // optimized bitmap cache and bitmap loading holder.title.setEllipsize(TextUtils.TruncateAt.END); holder.poster.setImageDrawable(getResources().getDrawable(R.drawable.loader2)); final SoftReference<Bitmap> reference = postersMap.get(movies.get(position) .getCanonicalTitle()); Bitmap bitmap = null; if (reference != null) { bitmap = reference.get(); } if (bitmap != null) { holder.poster.setImageBitmap(bitmap); } convertView .setBackgroundDrawable(getResources().getDrawable(R.drawable.gallery_background_1)); return convertView; } private class ViewHolder { private final TextView title; private final ImageView poster; private ViewHolder(final TextView title, final ImageView poster) { this.title = title; this.poster = poster; } } public final int getCount() { if (movies != null) { return Math.min(100, movies.size()); } else { return 0; } } public final Object getItem(final int position) { return movies.get(position % movies.size()); } public final long getItemId(final int position) { return position; } public void refreshMovies() { notifyDataSetChanged(); } public int getPositionForSection(final int section) { Integer position = null; if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { position = alphaMoviePositionsMap.get(section); } if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { position = scoreMoviePositionsMap.get(section); } if (position != null) { lastPosition = position; } return lastPosition; } public int getSectionForPosition(final int position) { if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { return alphaMovieSectionsMap.get(position); } if 
(NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { return scoreMovieSectionsMap.get(position); } return position; } public Object[] getSections() { // fast scroll is implemented only for alphabetic & score sort for release // 1. if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { return alphabet; } if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { return score; } return null; } } @Override public boolean onCreateOptionsMenu(final Menu menu) { menu.add(0, MovieViewUtilities.MENU_SEARCH, 0, R.string.search).setIcon( android.R.drawable.ic_menu_search); menu.add(0, MovieViewUtilities.MENU_SORT, 0, R.string.sort_movies).setIcon( R.drawable.ic_menu_switch); menu.add(0, MovieViewUtilities.MENU_THEATER, 0, R.string.theaters).setIcon( R.drawable.ic_menu_allfriends); menu.add(0, MovieViewUtilities.MENU_UPCOMING, 0, R.string.upcoming) .setIcon(R.drawable.upcoming); menu.add(0, MovieViewUtilities.MENU_SEND_FEEDBACK, 0, R.string.send_feedback).setIcon( android.R.drawable.ic_menu_send); menu.add(0, MovieViewUtilities.MENU_SETTINGS, 0, R.string.settings).setIcon( android.R.drawable.ic_menu_preferences).setIntent( new Intent(this, SettingsActivity.class).putExtra("from_menu", "yes")) .setAlphabeticShortcut('s'); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(final MenuItem item) { if (item.getItemId() == MovieViewUtilities.MENU_SORT) { final NowPlayingPreferenceDialog builder = new NowPlayingPreferenceDialog(this).setKey( NowPlayingPreferenceDialog.PreferenceKeys.MOVIES_SORT).setEntries( R.array.entries_movies_sort_preference).setPositiveButton(android.R.string.ok) .setNegativeButton(android.R.string.cancel); builder.setTitle(R.string.sort_movies); builder.show(); return true; } if (item.getItemId() == MovieViewUtilities.MENU_THEATER) { final Intent localIntent = new Intent(); localIntent.setClass(this, AllTheatersActivity.class); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_UPCOMING) { final Intent localIntent = new Intent(); localIntent.setClass(this, UpcomingMoviesActivity.class); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_SEARCH) { final Intent localIntent = new Intent(); localIntent.setClass(this, SearchMovieActivity.class); localIntent.putExtra("activity", "NowPlayingActivity"); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_SEND_FEEDBACK) { final Resources res = getResources(); final String address = "[email protected], [email protected]"; final Intent localIntent = new Intent(Intent.ACTION_SENDTO, Uri.parse("mailto:" + address)); localIntent.putExtra("subject", res.getString(R.string.feedback)); final String body = getUserSettings(); localIntent.putExtra("body", body); startActivity(localIntent); return true; } return false; } private String getUserSettings() { String body = "\n\n\n\n"; body += NowPlayingApplication.getNameAndVersion(getResources()); body += "\nAuto-Update Location: " + NowPlayingControllerWrapper.isAutoUpdateEnabled(); body += "\nLocation: " + NowPlayingControllerWrapper.getUserLocation(); body += "\nSearch Distance: " + NowPlayingControllerWrapper.getSearchDistance(); body += "\nReviews: " + NowPlayingControllerWrapper.getScoreType(); return body; } private void setupRotationAnimation(final View view) { final float centerX = view.getWidth() / 2.0f; final float centerY = view.getHeight() / 2.0f; // Create a new 3D rotation with the supplied 
parameter // The animation listener is used to trigger the next animation final Rotate3dAnimation rotation = new Rotate3dAnimation(80, 0, centerX, centerY, 0.0f, true); rotation.setDuration(20); rotation.setFillAfter(true); rotation.setAnimationListener(new AnimationListener() { public void onAnimationEnd(final Animation animation) { intent.putExtra("movie", (Parcelable) selectedMovie); startActivity(intent); } public void onAnimationRepeat(final Animation animation) { } public void onAnimationStart(final Animation animation) { } }); view.startAnimation(rotation); } private class LoadPostersTask extends UserTask<Void, Void, Void> { @Override public Void doInBackground(final Void... params) { for (final Movie movie : movies) { final SoftReference<Bitmap> reference = postersMap.get(movie.getCanonicalTitle()); Bitmap bitmap = null; if (reference != null) { bitmap = reference.get(); } if (bitmap == null) { final File file = NowPlayingControllerWrapper.getPosterFile_safeToCallFromBackground(movie); if (file != null) { final byte[] bytes = FileUtilities.readBytes(file); if (bytes != null && bytes.length > 0) { bitmap = createBitmap(bytes); if (bitmap != null) { postersMap.put(movie.getCanonicalTitle(), new SoftReference<Bitmap>(bitmap)); } } } } } return null; } @Override public void onPostExecute(final Void result) { super.onPostExecute(result); if (postersAdapter != null) { postersAdapter.refreshMovies(); } } } private static Bitmap createBitmap(final byte[] bytes) { try { return BitmapFactory.decodeByteArray(bytes, 0, bytes.length); } catch (final OutOfMemoryError ignored) { return null; } /* * final BitmapFactory.Options options = new BitmapFactory.Options(); final * int width = 90; final int height = 125; // Get the dimensions only. * options.inJustDecodeBounds = true; BitmapFactory.decodeByteArray(bytes, * 0, bytes.length, options); final int bitmapWidth = options.outWidth; * final int bitmapHeight = options.outHeight; final float scale = * Math.min((float) bitmapWidth / (float) width, (float) bitmapHeight / * (float) height) 2; options.inJustDecodeBounds = false; * options.inPreferredConfig = Bitmap.Config.ARGB_8888; options.inSampleSize = * (int) scale; final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, * 0, bytes.length, options); return bitmap; */ } }
NowPlaying/Android/src/org/metasyntactic/activities/NowPlayingActivity.java
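The activity above caches decoded posters in a static Map<String, SoftReference<Bitmap>> so that the garbage collector can reclaim bitmaps under memory pressure. The following small, Android-independent sketch (the class name SoftCache is hypothetical) isolates that caching pattern.

// Minimal sketch of the SoftReference cache pattern used for postersMap above.
import java.lang.ref.SoftReference;
import java.util.HashMap;
import java.util.Map;

public class SoftCache<K, V> {
    private final Map<K, SoftReference<V>> map = new HashMap<K, SoftReference<V>>();

    public void put(K key, V value) {
        map.put(key, new SoftReference<V>(value));
    }

    public V get(K key) {
        // The garbage collector may clear a SoftReference under memory pressure,
        // so a cached value can disappear between put() and get(); callers must
        // handle null, as the poster loading code above does.
        SoftReference<V> reference = map.get(key);
        return reference == null ? null : reference.get();
    }

    public static void main(String[] args) {
        SoftCache<String, byte[]> posters = new SoftCache<String, byte[]>();
        posters.put("canonical title", new byte[] { 1, 2, 3 });
        byte[] cached = posters.get("canonical title");
        System.out.println(cached == null ? "already collected" : cached.length + " bytes cached");
    }
}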
package org.metasyntactic.activities; import android.app.Activity; import android.app.AlertDialog; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.net.Uri; import android.os.Bundle; import android.os.Parcelable; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.view.Window; import android.view.animation.Animation; import android.view.animation.Animation.AnimationListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.BaseAdapter; import android.widget.Button; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; import static org.apache.commons.collections.CollectionUtils.isEmpty; import org.metasyntactic.INowPlaying; import org.metasyntactic.NowPlayingApplication; import org.metasyntactic.NowPlayingControllerWrapper; import org.metasyntactic.UserTask; import org.metasyntactic.data.Movie; import org.metasyntactic.data.Score; import org.metasyntactic.providers.DataProvider; import org.metasyntactic.utilities.FileUtilities; import org.metasyntactic.utilities.LogUtilities; import org.metasyntactic.utilities.MovieViewUtilities; import org.metasyntactic.utilities.StringUtilities; import static org.metasyntactic.utilities.StringUtilities.isNullOrEmpty; import org.metasyntactic.views.CustomGridView; import org.metasyntactic.views.FastScrollGridView; import org.metasyntactic.views.NowPlayingPreferenceDialog; import org.metasyntactic.views.Rotate3dAnimation; import java.io.File; import java.lang.ref.SoftReference; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; public class NowPlayingActivity extends Activity implements INowPlaying { private CustomGridView grid; private Intent intent; private Movie selectedMovie; private PostersAdapter postersAdapter; private boolean isGridSetup; private List<Movie> movies; private int lastPosition; private String search; private final Map<Integer, Integer> alphaMovieSectionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> alphaMoviePositionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> scoreMovieSectionsMap = new HashMap<Integer, Integer>(); private final Map<Integer, Integer> scoreMoviePositionsMap = new HashMap<Integer, Integer>(); private static final Map<String, SoftReference<Bitmap>> postersMap = new HashMap<String, SoftReference<Bitmap>>(); private String[] alphabet; private String[] score; private TextView progressUpdate; private RelativeLayout bottomBar; /* This task is controlled by the TaskManager based on the scrolling state */ private UserTask<?, ?, ?> mTask; private final BroadcastReceiver broadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { refresh(); } }; private final BroadcastReceiver progressBroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { 
progressUpdate.setText(intent.getStringExtra("message")); } }; private final BroadcastReceiver dataBroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { // the data provider finished downloading. set up our view accordingly. setupView(); } }; private final BroadcastReceiver scrollStatebroadcastReceiver = new BroadcastReceiver() { @Override public void onReceive(final Context context, final Intent intent) { if (NowPlayingApplication.NOT_SCROLLING_INTENT.equals(intent.getAction()) && mTask != null && mTask.getStatus() != UserTask.Status.RUNNING) { mTask = new LoadPostersTask().execute(null); } if (NowPlayingApplication.SCROLLING_INTENT.equals(intent.getAction()) && mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } }; private void showNoInformationFoundDialog() { new AlertDialog.Builder(this).setMessage(R.string.no_information) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int whichButton) { } }).show(); } private void setupView() { // we're currently in 'downloading' mode. We need to deal with a few // cases. First, we deal with the case where a user has returned to // this activity, and now there are movies available. In that case, we // just display them. refresh(); if (isEmpty(movies)) { // Ok. so we have no movies. THat means one of two things. Either // we're trying to download the movies, or we tried and failed to // download them. In the former case just wait. We'll get a // notification when they're done. In the latter case, let the user // know. if (!isNullOrEmpty(NowPlayingControllerWrapper.getUserLocation()) && NowPlayingControllerWrapper.getDataProviderState() == DataProvider.State.Finished) { showNoInformationFoundDialog(); } } else { setupMovieGrid(); } } @Override protected void onResume() { super.onResume(); LogUtilities.i(getClass().getSimpleName(), "onResume"); if (FileUtilities.isSDCardAccessible()) { registerReceiver(broadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_CHANGED_INTENT)); registerReceiver(dataBroadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOADED)); registerReceiver(scrollStatebroadcastReceiver, new IntentFilter( NowPlayingApplication.SCROLLING_INTENT)); registerReceiver(scrollStatebroadcastReceiver, new IntentFilter( NowPlayingApplication.NOT_SCROLLING_INTENT)); registerReceiver(progressBroadcastReceiver, new IntentFilter( NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOAD_PROGRESS)); if (isGridSetup) { grid.setVisibility(View.VISIBLE); postersAdapter.refreshMovies(); } else { setupView(); } } } @Override protected void onPause() { LogUtilities.i(getClass().getSimpleName(), "onPause"); if (FileUtilities.isSDCardAccessible()) { unregisterReceiver(broadcastReceiver); unregisterReceiver(dataBroadcastReceiver); unregisterReceiver(scrollStatebroadcastReceiver); unregisterReceiver(progressBroadcastReceiver); if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } super.onPause(); } @Override protected void onDestroy() { LogUtilities.i(getClass().getSimpleName(), "onDestroy"); if (FileUtilities.isSDCardAccessible()) { NowPlayingControllerWrapper.removeActivity(this); if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } clearBitmaps(); alphaMovieSectionsMap.clear(); 
alphaMoviePositionsMap.clear(); scoreMovieSectionsMap.clear(); scoreMoviePositionsMap.clear(); } super.onDestroy(); } @Override public Object onRetainNonConfigurationInstance() { LogUtilities.i(getClass().getSimpleName(), "onRetainNonConfigurationInstance"); final Object result = search; NowPlayingControllerWrapper.onRetainNonConfigurationInstance(this, result); return result; } /** * Updates display of the list of movies. */ public void refresh() { if (search == null) { movies = new ArrayList<Movie>(NowPlayingControllerWrapper.getMovies()); } // sort movies according to the default sort preference. final Comparator<Movie> comparator = MOVIE_ORDER.get(NowPlayingControllerWrapper .getAllMoviesSelectedSortIndex()); Collections.sort(movies, comparator); if (postersAdapter != null) { populateAlphaMovieSectionsAndPositions(); populateScoreMovieSectionsAndPositions(); FastScrollGridView.getSections(); postersAdapter.refreshMovies(); } // cancel task so that it doesnt try to load old set of movies if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } mTask = new LoadPostersTask().execute(null); } private List<Movie> getMatchingMoviesList(final String search2) { final String localSearch = search2.toLowerCase(); final List<Movie> matchingMovies = new ArrayList<Movie>(); for (final Movie movie : movies) { if (movie.getDisplayTitle().toLowerCase().contains(localSearch)) { matchingMovies.add(movie); } } return matchingMovies; } public Context getContext() { return this; } @Override public void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); LogUtilities.i(getClass().getSimpleName(), "onCreate"); search = (String) getLastNonConfigurationInstance(); // check for sdcard mounted properly if (FileUtilities.isSDCardAccessible()) { // Request the progress bar to be shown in the title requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.progressbar_1); progressUpdate = (TextView) findViewById(R.id.progress_update); NowPlayingControllerWrapper.addActivity(this); getUserLocation(); refresh(); } else { new AlertDialog.Builder(this).setTitle(R.string.insert_sdcard).setPositiveButton( android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(final DialogInterface dialog, final int whichButton) { finish(); } }).show(); } } @Override protected void onNewIntent(final Intent intent) { super.onNewIntent(intent); search = intent.getStringExtra("movie"); if (search != null) { bottomBar.setVisibility(View.VISIBLE); } getSearchResults(); refresh(); } private void getUserLocation() { final String userLocation = NowPlayingControllerWrapper.getUserLocation(); if (StringUtilities.isNullOrEmpty(userLocation)) { final Intent localIntent = new Intent(); localIntent.setClass(this, SettingsActivity.class); startActivity(localIntent); } } private void getSearchResults() { if (search != null) { final List<Movie> matchingMovies = getMatchingMoviesList(search); if (isEmpty(matchingMovies)) { Toast.makeText(this, getResources().getString(R.string.no_results_found_for) + search, Toast.LENGTH_SHORT).show(); } else { movies = matchingMovies; // cancel task so that it doesnt try to load the complete set of movies. 
if (mTask != null && mTask.getStatus() == UserTask.Status.RUNNING) { mTask.cancel(true); mTask = null; } } } } private static void clearBitmaps() { for (final SoftReference<Bitmap> reference : postersMap.values()) { final Bitmap drawable = reference.get(); if (drawable != null) { reference.clear(); } } } private void getAlphabet(final Context context) { final String alphabetString = context.getResources().getString(R.string.alphabet); alphabet = new String[alphabetString.length()]; for (int i = 0; i < alphabet.length; i++) { alphabet[i] = String.valueOf(alphabetString.charAt(i)); } } private void getScores() { score = new String[11]; for (int index = 0, i = 100; i >= 0; index++, i -= 10) { score[index] = i + "%"; } } private void setupMovieGrid() { if (isGridSetup) { return; } isGridSetup = true; getAlphabet(this); getScores(); setContentView(R.layout.moviegrid_anim); bottomBar = (RelativeLayout) findViewById(R.id.bottom_bar); if (search == null) { bottomBar.setVisibility(View.GONE); } final Button allMovies = (Button) findViewById(R.id.all_movies); allMovies.setOnClickListener(new OnClickListener() { public void onClick(final View arg0) { final Intent intent = new Intent().setClass(NowPlayingActivity.this, NowPlayingActivity.class); startActivity(intent); } }); grid = (CustomGridView) findViewById(R.id.grid); grid.setOnItemClickListener(new OnItemClickListener() { public void onItemClick(final AdapterView parent, final View view, final int position, final long id) { selectedMovie = movies.get(position); setupRotationAnimation(view); } }); populateAlphaMovieSectionsAndPositions(); populateScoreMovieSectionsAndPositions(); postersAdapter = new PostersAdapter(); grid.setAdapter(postersAdapter); intent = new Intent().setClass(this, MovieDetailsActivity.class); } private void populateAlphaMovieSectionsAndPositions() { int i = 0; String prevLetter = null; final List<String> alphabets = Arrays.asList(alphabet); for (final Movie movie : movies) { final String firstLetter = movie.getDisplayTitle().substring(0, 1); alphaMovieSectionsMap.put(i, alphabets.indexOf(firstLetter)); if (!firstLetter.equals(prevLetter)) { alphaMoviePositionsMap.put(alphabets.indexOf(firstLetter), i); } prevLetter = firstLetter; i++; } for (i = 0; i < alphabets.size(); i++) { if (alphaMoviePositionsMap.get(i) == null) { if (i == 0) { alphaMoviePositionsMap.put(0, 0); } else { alphaMoviePositionsMap.put(i, alphaMoviePositionsMap.get(i - 1)); } } } } private void populateScoreMovieSectionsAndPositions() { int i = 0; int prevLevel = 0; final List<String> scores = Arrays.asList(score); for (final Movie movie : movies) { final Score localScore = NowPlayingControllerWrapper.getScore(movie); final int scoreValue = localScore == null ? 
0 : localScore.getScoreValue(); final int scoreLevel = scoreValue / 10 * 10; scoreMovieSectionsMap.put(i, scores.indexOf(scoreLevel + "%")); if (scoreLevel != prevLevel) { scoreMoviePositionsMap.put(scores.indexOf(scoreLevel + "%"), i); } prevLevel = scoreLevel; i++; } for (i = 0; i < scores.size(); i++) { if (scoreMoviePositionsMap.get(i) == null) { if (i == 0) { scoreMoviePositionsMap.put(0, 0); } else { scoreMoviePositionsMap.put(i, scoreMoviePositionsMap.get(i - 1)); } } } } public final static List<Comparator<Movie>> MOVIE_ORDER = Arrays.asList(Movie.TITLE_ORDER, Movie.RELEASE_ORDER, Movie.SCORE_ORDER); private class PostersAdapter extends BaseAdapter implements FastScrollGridView.SectionIndexer { private final LayoutInflater inflater; private PostersAdapter() { // Cache the LayoutInflate to avoid asking for a new one each time. inflater = LayoutInflater.from(NowPlayingActivity.this); } public View getView(final int position, View convertView, final ViewGroup parent) { // to findViewById() on each row. final ViewHolder holder; // When convertView is not null, we can reuse it directly, there is // no need to reinflate it. We only inflate a new View when the // convertView // supplied by GridView is null. if (convertView == null) { convertView = inflater.inflate(R.layout.moviegrid_item, null); // Creates a ViewHolder and store references to the two children // views we want to bind data to. holder = new ViewHolder((TextView) convertView.findViewById(R.id.title), (ImageView) convertView.findViewById(R.id.poster)); convertView.setTag(holder); } else { // Get the ViewHolder back to get fast access to the TextView // and the ImageView. holder = (ViewHolder) convertView.getTag(); } final Movie movie = movies.get(position % movies.size()); NowPlayingControllerWrapper.prioritizeMovie(movie); holder.title.setText(movie.getDisplayTitle()); // optimized bitmap cache and bitmap loading holder.title.setEllipsize(TextUtils.TruncateAt.END); holder.poster.setImageDrawable(getResources().getDrawable(R.drawable.loader2)); final SoftReference<Bitmap> reference = postersMap.get(movies.get(position) .getCanonicalTitle()); Bitmap bitmap = null; if (reference != null) { bitmap = reference.get(); } if (bitmap != null) { holder.poster.setImageBitmap(bitmap); } convertView .setBackgroundDrawable(getResources().getDrawable(R.drawable.gallery_background_1)); return convertView; } private class ViewHolder { private final TextView title; private final ImageView poster; private ViewHolder(final TextView title, final ImageView poster) { this.title = title; this.poster = poster; } } public final int getCount() { if (movies != null) { return Math.min(100, movies.size()); } else { return 0; } } public final Object getItem(final int position) { return movies.get(position % movies.size()); } public final long getItemId(final int position) { return position; } public void refreshMovies() { notifyDataSetChanged(); } public int getPositionForSection(final int section) { Integer position = null; if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { position = alphaMoviePositionsMap.get(section); } if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { position = scoreMoviePositionsMap.get(section); } if (position != null) { lastPosition = position; } return lastPosition; } public int getSectionForPosition(final int position) { if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { return alphaMovieSectionsMap.get(position); } if 
(NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { return scoreMovieSectionsMap.get(position); } return position; } public Object[] getSections() { // fast scroll is implemented only for alphabetic & score sort for release // 1. if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 0) { return alphabet; } if (NowPlayingControllerWrapper.getAllMoviesSelectedSortIndex() == 2) { return score; } return null; } } @Override public boolean onCreateOptionsMenu(final Menu menu) { menu.add(0, MovieViewUtilities.MENU_SEARCH, 0, R.string.search).setIcon( android.R.drawable.ic_menu_search); menu.add(0, MovieViewUtilities.MENU_SORT, 0, R.string.sort_movies).setIcon( R.drawable.ic_menu_switch); menu.add(0, MovieViewUtilities.MENU_THEATER, 0, R.string.theaters).setIcon( R.drawable.ic_menu_allfriends); menu.add(0, MovieViewUtilities.MENU_UPCOMING, 0, R.string.upcoming) .setIcon(R.drawable.upcoming); menu.add(0, MovieViewUtilities.MENU_SEND_FEEDBACK, 0, R.string.send_feedback).setIcon( android.R.drawable.ic_menu_send); menu.add(0, MovieViewUtilities.MENU_SETTINGS, 0, R.string.settings).setIcon( android.R.drawable.ic_menu_preferences).setIntent( new Intent(this, SettingsActivity.class).putExtra("from_menu", "yes")) .setAlphabeticShortcut('s'); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(final MenuItem item) { if (item.getItemId() == MovieViewUtilities.MENU_SORT) { final NowPlayingPreferenceDialog builder = new NowPlayingPreferenceDialog(this).setKey( NowPlayingPreferenceDialog.PreferenceKeys.MOVIES_SORT).setEntries( R.array.entries_movies_sort_preference).setPositiveButton(android.R.string.ok) .setNegativeButton(android.R.string.cancel); builder.setTitle(R.string.sort_movies); builder.show(); return true; } if (item.getItemId() == MovieViewUtilities.MENU_THEATER) { final Intent localIntent = new Intent(); localIntent.setClass(this, AllTheatersActivity.class); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_UPCOMING) { final Intent localIntent = new Intent(); localIntent.setClass(this, UpcomingMoviesActivity.class); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_SEARCH) { final Intent localIntent = new Intent(); localIntent.setClass(this, SearchMovieActivity.class); localIntent.putExtra("activity", "NowPlayingActivity"); startActivity(localIntent); return true; } if (item.getItemId() == MovieViewUtilities.MENU_SEND_FEEDBACK) { final Resources res = getResources(); final String address = "[email protected], [email protected]"; final Intent localIntent = new Intent(Intent.ACTION_SENDTO, Uri.parse("mailto:" + address)); localIntent.putExtra("subject", res.getString(R.string.feedback)); final String body = getUserSettings(); localIntent.putExtra("body", body); startActivity(localIntent); return true; } return false; } private String getUserSettings() { String body = "\n\n\n\n"; body += NowPlayingApplication.getNameAndVersion(getResources()); body += "\nAuto-Update Location: " + NowPlayingControllerWrapper.isAutoUpdateEnabled(); body += "\nLocation: " + NowPlayingControllerWrapper.getUserLocation(); body += "\nSearch Distance: " + NowPlayingControllerWrapper.getSearchDistance(); body += "\nReviews: " + NowPlayingControllerWrapper.getScoreType(); return body; } private void setupRotationAnimation(final View view) { final float centerX = view.getWidth() / 2.0f; final float centerY = view.getHeight() / 2.0f; // Create a new 3D rotation with the supplied 
parameter // The animation listener is used to trigger the next animation final Rotate3dAnimation rotation = new Rotate3dAnimation(80, 0, centerX, centerY, 0.0f, true); rotation.setDuration(20); rotation.setFillAfter(true); rotation.setAnimationListener(new AnimationListener() { public void onAnimationEnd(final Animation animation) { intent.putExtra("movie", (Parcelable) selectedMovie); startActivity(intent); } public void onAnimationRepeat(final Animation animation) { } public void onAnimationStart(final Animation animation) { } }); view.startAnimation(rotation); } private class LoadPostersTask extends UserTask<Void, Void, Void> { @Override public Void doInBackground(final Void... params) { Bitmap bitmap = null; for (final Movie movie : movies) { final SoftReference<Bitmap> reference = NowPlayingActivity.postersMap.get(movie .getCanonicalTitle()); if (reference != null) { bitmap = reference.get(); } if (reference == null || bitmap == null) { final File file = NowPlayingControllerWrapper .getPosterFile_safeToCallFromBackground(movie); if (file != null) { final byte[] bytes = FileUtilities.readBytes(file); if (bytes != null && bytes.length > 0) { bitmap = createBitmap(bytes); if (bitmap != null) { NowPlayingActivity.postersMap.put(movie.getCanonicalTitle(), new SoftReference<Bitmap>(bitmap)); } } } } } return null; } @Override public void onPostExecute(final Void result) { super.onPostExecute(result); if (postersAdapter != null) { postersAdapter.refreshMovies(); } } } private static Bitmap createBitmap(final byte[] bytes) { try { return BitmapFactory.decodeByteArray(bytes, 0, bytes.length); } catch (final OutOfMemoryError ignored) { return null; } /* * final BitmapFactory.Options options = new BitmapFactory.Options(); final * int width = 90; final int height = 125; // Get the dimensions only. * options.inJustDecodeBounds = true; BitmapFactory.decodeByteArray(bytes, * 0, bytes.length, options); final int bitmapWidth = options.outWidth; * final int bitmapHeight = options.outHeight; final float scale = * Math.min((float) bitmapWidth / (float) width, (float) bitmapHeight / * (float) height) 2; options.inJustDecodeBounds = false; * options.inPreferredConfig = Bitmap.Config.ARGB_8888; options.inSampleSize = * (int) scale; final Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, * 0, bytes.length, options); return bitmap; */ } }
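The commented-out block inside createBitmap() above outlines a memory-friendlier decode that reads the poster dimensions first and then downsamples via inSampleSize; the scale expression in that comment also appears to have lost a multiplication sign. A hypothetical reconstruction of that disabled approach is sketched below (the helper name createScaledBitmap and the Math.max clamping are illustrative choices, not part of the original activity; the 90x125 target size comes from the comment):

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

final class PosterDecoding {
    // Sketch only: decode poster bytes at reduced resolution, roughly matching
    // the disabled alternative in NowPlayingActivity.createBitmap().
    static Bitmap createScaledBitmap(final byte[] bytes) {
        final int targetWidth = 90;
        final int targetHeight = 125;
        final BitmapFactory.Options options = new BitmapFactory.Options();
        // First pass: read only the image bounds, no pixel data is allocated.
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
        final float scale = Math.min(
            (float) options.outWidth / (float) targetWidth,
            (float) options.outHeight / (float) targetHeight) * 2;
        // Second pass: decode for real, sampling down by roughly 'scale'.
        options.inJustDecodeBounds = false;
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
        options.inSampleSize = Math.max(1, (int) scale);
        try {
            return BitmapFactory.decodeByteArray(bytes, 0, bytes.length, options);
        } catch (final OutOfMemoryError ignored) {
            return null;
        }
    }
}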
cleaning up code
NowPlaying/Android/src/org/metasyntactic/activities/NowPlayingActivity.java
cleaning up code
Java
apache-2.0
f6a742d687f503c7ceea08e70f49c90da7027f5a
0
UweTrottmann/SeriesGuide,UweTrottmann/SeriesGuide,artemnikitin/SeriesGuide,epiphany27/SeriesGuide,hoanganhx86/SeriesGuide,r00t-user/SeriesGuide,0359xiaodong/SeriesGuide
package com.battlelancer.seriesguide.ui.dialogs; import android.app.Activity; import android.content.ContentValues; import android.os.Bundle; import android.support.v4.app.DialogFragment; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import com.battlelancer.seriesguide.beta.R; import com.battlelancer.seriesguide.provider.SeriesContract.Lists; import com.battlelancer.seriesguide.ui.OnListsChangedListener; public class AddListDialogFragment extends DialogFragment { public static AddListDialogFragment newInstance() { AddListDialogFragment f = new AddListDialogFragment(); return f; } private EditText mTitle; private OnListsChangedListener mListener; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // hide title, use custom theme setStyle(STYLE_NO_TITLE, R.style.SeriesGuideTheme_Dialog); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { final View layout = inflater.inflate(R.layout.list_manage_dialog, null); // set alternate dialog title ((TextView) layout.findViewById(R.id.dialogTitle)).setText(R.string.list_add); // title mTitle = (EditText) layout.findViewById(R.id.title); // buttons Button buttonNegative = (Button) layout.findViewById(R.id.buttonNegative); buttonNegative.setText(android.R.string.cancel); buttonNegative.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { dismiss(); } }); Button buttonPositive = (Button) layout.findViewById(R.id.buttonPositive); buttonPositive.setText(R.string.list_add); buttonPositive.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (mTitle.getText().length() == 0) { return; } // add list String listName = mTitle.getText().toString(); ContentValues values = new ContentValues(); values.put(Lists.LIST_ID, Lists.generateListId(listName)); values.put(Lists.NAME, listName); getActivity().getContentResolver().insert(Lists.CONTENT_URI, values); // refresh view pager mListener.onListsChanged(); dismiss(); } }); return layout; } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnListsChangedListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnListsChangedListener"); } } /** * Display a dialog which allows to edit the title of this list or remove * it. */ public static void showAddListDialog(FragmentManager fm) { // DialogFragment.show() will take care of adding the fragment // in a transaction. We also want to remove any currently showing // dialog, so make our own transaction and take care of that here. FragmentTransaction ft = fm.beginTransaction(); Fragment prev = fm.findFragmentByTag("addlistdialog"); if (prev != null) { ft.remove(prev); } ft.addToBackStack(null); // Create and show the dialog. DialogFragment newFragment = AddListDialogFragment.newInstance(); newFragment.show(ft, "addlistdialog"); } }
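The static showAddListDialog(FragmentManager) helper above hides the transaction bookkeeping described in its comment. A minimal, hypothetical call site is sketched below; ListsActivity and its onAddListRequested() callback are illustrative assumptions, and the host must implement OnListsChangedListener or onAttach() will throw a ClassCastException:

import android.support.v4.app.FragmentActivity;

import com.battlelancer.seriesguide.ui.OnListsChangedListener;
import com.battlelancer.seriesguide.ui.dialogs.AddListDialogFragment;

// Illustrative host activity, not part of the original change.
public class ListsActivity extends FragmentActivity implements OnListsChangedListener {

    // Invoked e.g. from an "add list" action item.
    private void onAddListRequested() {
        // The helper removes any dialog already shown under the "addlistdialog" tag
        // before creating and showing a fresh AddListDialogFragment.
        AddListDialogFragment.showAddListDialog(getSupportFragmentManager());
    }

    @Override
    public void onListsChanged() {
        // Refresh the lists UI after the new list row has been inserted.
    }
}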
SeriesGuide/src/com/battlelancer/seriesguide/ui/dialogs/AddListDialogFragment.java
package com.battlelancer.seriesguide.ui.dialogs; import android.app.Activity; import android.content.ContentValues; import android.os.Bundle; import android.support.v4.app.DialogFragment; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.Button; import android.widget.EditText; import com.battlelancer.seriesguide.beta.R; import com.battlelancer.seriesguide.provider.SeriesContract.Lists; import com.battlelancer.seriesguide.ui.OnListsChangedListener; public class AddListDialogFragment extends DialogFragment { public static AddListDialogFragment newInstance() { AddListDialogFragment f = new AddListDialogFragment(); return f; } private EditText mTitle; private OnListsChangedListener mListener; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // hide title, use custom theme setStyle(STYLE_NO_TITLE, R.style.SeriesGuideTheme_Dialog); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { final View layout = inflater.inflate(R.layout.list_manage_dialog, null); // title mTitle = (EditText) layout.findViewById(R.id.title); // buttons Button buttonNegative = (Button) layout.findViewById(R.id.buttonNegative); buttonNegative.setText(android.R.string.cancel); buttonNegative.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { dismiss(); } }); Button buttonPositive = (Button) layout.findViewById(R.id.buttonPositive); buttonPositive.setText(R.string.list_add); buttonPositive.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (mTitle.getText().length() == 0) { return; } // add list String listName = mTitle.getText().toString(); ContentValues values = new ContentValues(); values.put(Lists.LIST_ID, Lists.generateListId(listName)); values.put(Lists.NAME, listName); getActivity().getContentResolver().insert(Lists.CONTENT_URI, values); // refresh view pager mListener.onListsChanged(); dismiss(); } }); return layout; } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (OnListsChangedListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement OnListsChangedListener"); } } /** * Display a dialog which allows to edit the title of this list or remove * it. */ public static void showAddListDialog(FragmentManager fm) { // DialogFragment.show() will take care of adding the fragment // in a transaction. We also want to remove any currently showing // dialog, so make our own transaction and take care of that here. FragmentTransaction ft = fm.beginTransaction(); Fragment prev = fm.findFragmentByTag("addlistdialog"); if (prev != null) { ft.remove(prev); } ft.addToBackStack(null); // Create and show the dialog. DialogFragment newFragment = AddListDialogFragment.newInstance(); newFragment.show(ft, "addlistdialog"); } }
Display correct dialog title in add dialog.
SeriesGuide/src/com/battlelancer/seriesguide/ui/dialogs/AddListDialogFragment.java
Display correct dialog title in add dialog.
Java
apache-2.0
b3d6550993c37c3d9a007008c1897ebfd9ae22e7
0
twitter-forks/bazel,twitter-forks/bazel,cushon/bazel,davidzchen/bazel,dslomov/bazel-windows,dslomov/bazel-windows,aehlig/bazel,dslomov/bazel,aehlig/bazel,katre/bazel,ulfjack/bazel,dslomov/bazel-windows,perezd/bazel,safarmer/bazel,ButterflyNetwork/bazel,perezd/bazel,ulfjack/bazel,perezd/bazel,dslomov/bazel-windows,safarmer/bazel,ButterflyNetwork/bazel,werkt/bazel,perezd/bazel,davidzchen/bazel,katre/bazel,werkt/bazel,aehlig/bazel,perezd/bazel,dslomov/bazel,werkt/bazel,ButterflyNetwork/bazel,cushon/bazel,davidzchen/bazel,twitter-forks/bazel,dslomov/bazel-windows,ulfjack/bazel,aehlig/bazel,ButterflyNetwork/bazel,perezd/bazel,twitter-forks/bazel,meteorcloudy/bazel,werkt/bazel,katre/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,cushon/bazel,davidzchen/bazel,aehlig/bazel,bazelbuild/bazel,akira-baruah/bazel,dslomov/bazel,werkt/bazel,safarmer/bazel,bazelbuild/bazel,bazelbuild/bazel,katre/bazel,katre/bazel,akira-baruah/bazel,dslomov/bazel,dslomov/bazel,cushon/bazel,katre/bazel,dslomov/bazel-windows,bazelbuild/bazel,twitter-forks/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,werkt/bazel,meteorcloudy/bazel,akira-baruah/bazel,dslomov/bazel,davidzchen/bazel,meteorcloudy/bazel,meteorcloudy/bazel,cushon/bazel,davidzchen/bazel,bazelbuild/bazel,aehlig/bazel,perezd/bazel,akira-baruah/bazel,bazelbuild/bazel,ulfjack/bazel,ulfjack/bazel,safarmer/bazel,aehlig/bazel,twitter-forks/bazel,meteorcloudy/bazel,cushon/bazel,twitter-forks/bazel,ulfjack/bazel,safarmer/bazel,meteorcloudy/bazel,ulfjack/bazel,akira-baruah/bazel,dslomov/bazel,safarmer/bazel,davidzchen/bazel
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.devtools.build.lib.rules.java.JavaCompileActionBuilder.UTF8_ENVIRONMENT; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ExecutionRequirements; import com.google.devtools.build.lib.actions.ParamFileInfo; import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType; import com.google.devtools.build.lib.actions.SpawnResult; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.SpawnAction; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.StrictDepsMode; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.rules.java.JavaCompileAction.ProgressMessage; import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode; import com.google.devtools.build.lib.rules.java.JavaPluginInfoProvider.JavaPluginInfo; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.view.proto.Deps; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.function.Consumer; import javax.annotation.Nullable; /** * Action builder for Java header compilation, to be used if --java_header_compilation is enabled. * * <p>The header compiler consumes the inputs of a java compilation, and produces an interface jar * that can be used as a compile-time jar by upstream targets. The header interface jar is * equivalent to the output of ijar, but unlike ijar the header compiler operates directly on Java * source files instead post-processing the class outputs of the compilation. Compiling the * interface jar from source moves javac off the build's critical path. * * <p>The implementation of the header compiler tool can be found under {@code * //src/java_tools/buildjar/java/com/google/devtools/build/java/turbine}. 
*/ public class JavaHeaderCompileActionBuilder { private final RuleContext ruleContext; private Artifact outputJar; @Nullable private Artifact outputDepsProto; private ImmutableSet<Artifact> sourceFiles = ImmutableSet.of(); private ImmutableList<Artifact> sourceJars = ImmutableList.of(); private NestedSet<Artifact> classpathEntries = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of(); @Nullable private Label targetLabel; @Nullable private String injectingRuleKind; private PathFragment tempDirectory; private StrictDepsMode strictJavaDeps = StrictDepsMode.OFF; private boolean reduceClasspath = true; private NestedSet<Artifact> directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private NestedSet<Artifact> compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER); private ImmutableList<String> javacOpts; private JavaPluginInfo plugins = JavaPluginInfo.empty(); private NestedSet<Artifact> additionalInputs = NestedSetBuilder.emptySet(Order.STABLE_ORDER); private NestedSet<Artifact> toolsJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); public JavaHeaderCompileActionBuilder(RuleContext ruleContext) { this.ruleContext = ruleContext; } /** Sets the output jdeps file. */ public JavaHeaderCompileActionBuilder setOutputDepsProto(@Nullable Artifact outputDepsProto) { this.outputDepsProto = outputDepsProto; return this; } /** Sets the direct dependency artifacts. */ public JavaHeaderCompileActionBuilder setDirectJars(NestedSet<Artifact> directJars) { checkNotNull(directJars, "directJars must not be null"); this.directJars = directJars; return this; } /** Sets the .jdeps artifacts for direct dependencies. */ public JavaHeaderCompileActionBuilder setCompileTimeDependencyArtifacts( NestedSet<Artifact> dependencyArtifacts) { checkNotNull(dependencyArtifacts, "dependencyArtifacts must not be null"); this.compileTimeDependencyArtifacts = dependencyArtifacts; return this; } /** Sets Java compiler flags. */ public JavaHeaderCompileActionBuilder setJavacOpts(ImmutableList<String> javacOpts) { checkNotNull(javacOpts, "javacOpts must not be null"); this.javacOpts = javacOpts; return this; } /** Sets the output jar. */ public JavaHeaderCompileActionBuilder setOutputJar(Artifact outputJar) { checkNotNull(outputJar, "outputJar must not be null"); this.outputJar = outputJar; return this; } /** Adds Java source files to compile. */ public JavaHeaderCompileActionBuilder setSourceFiles(ImmutableSet<Artifact> sourceFiles) { checkNotNull(sourceFiles, "sourceFiles must not be null"); this.sourceFiles = sourceFiles; return this; } /** Adds a jar archive of Java sources to compile. */ public JavaHeaderCompileActionBuilder setSourceJars(ImmutableList<Artifact> sourceJars) { checkNotNull(sourceJars, "sourceJars must not be null"); this.sourceJars = sourceJars; return this; } /** Sets the compilation classpath entries. */ public JavaHeaderCompileActionBuilder setClasspathEntries(NestedSet<Artifact> classpathEntries) { checkNotNull(classpathEntries, "classpathEntries must not be null"); this.classpathEntries = classpathEntries; return this; } /** Sets the compilation bootclasspath entries. */ public JavaHeaderCompileActionBuilder setBootclasspathEntries( ImmutableList<Artifact> bootclasspathEntries) { checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null"); this.bootclasspathEntries = bootclasspathEntries; return this; } /** Sets the annotation processors classpath entries. 
*/ public JavaHeaderCompileActionBuilder setPlugins(JavaPluginInfo plugins) { checkNotNull(plugins, "plugins must not be null"); checkState(this.plugins.isEmpty()); this.plugins = plugins; return this; } /** Sets the label of the target being compiled. */ public JavaHeaderCompileActionBuilder setTargetLabel(@Nullable Label targetLabel) { this.targetLabel = targetLabel; return this; } /** Sets the injecting rule kind of the target being compiled. */ public JavaHeaderCompileActionBuilder setInjectingRuleKind(@Nullable String injectingRuleKind) { this.injectingRuleKind = injectingRuleKind; return this; } /** * Sets the path to a temporary directory, e.g. for extracting sourcejar entries to before * compilation. */ public JavaHeaderCompileActionBuilder setTempDirectory(PathFragment tempDirectory) { checkNotNull(tempDirectory, "tempDirectory must not be null"); this.tempDirectory = tempDirectory; return this; } /** Sets the Strict Java Deps mode. */ public JavaHeaderCompileActionBuilder setStrictJavaDeps(StrictDepsMode strictJavaDeps) { checkNotNull(strictJavaDeps, "strictJavaDeps must not be null"); this.strictJavaDeps = strictJavaDeps; return this; } /** Enables reduced classpaths. */ public JavaHeaderCompileActionBuilder setReduceClasspath(boolean reduceClasspath) { this.reduceClasspath = reduceClasspath; return this; } /** Sets the javabase inputs. */ public JavaHeaderCompileActionBuilder setAdditionalInputs(NestedSet<Artifact> additionalInputs) { checkNotNull(additionalInputs, "additionalInputs must not be null"); this.additionalInputs = additionalInputs; return this; } /** Sets the tools jars. */ public JavaHeaderCompileActionBuilder setToolsJars(NestedSet<Artifact> toolsJars) { checkNotNull(toolsJars, "toolsJars must not be null"); this.toolsJars = toolsJars; return this; } /** Builds and registers the action for a header compilation. */ public void build(JavaToolchainProvider javaToolchain, JavaRuntimeInfo hostJavabase) { checkNotNull(outputDepsProto, "outputDepsProto must not be null"); checkNotNull(sourceFiles, "sourceFiles must not be null"); checkNotNull(sourceJars, "sourceJars must not be null"); checkNotNull(classpathEntries, "classpathEntries must not be null"); checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null"); checkNotNull(tempDirectory, "tempDirectory must not be null"); checkNotNull(strictJavaDeps, "strictJavaDeps must not be null"); checkNotNull(directJars, "directJars must not be null"); checkNotNull(compileTimeDependencyArtifacts, "compileTimeDependencyArtifacts must not be null"); checkNotNull(javacOpts, "javacOpts must not be null"); // Invariant: if strictJavaDeps is OFF, then directJars and // dependencyArtifacts are ignored if (strictJavaDeps == StrictDepsMode.OFF) { directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER); } // Enable the direct classpath optimization if there are no annotation processors. // N.B. we only check if the processor classes are empty, we don't care if there is plugin // data or dependencies if there are no annotation processors to run. This differs from // javac where java_plugin may be used with processor_class unset to declare Error Prone // plugins. boolean useDirectClasspath = plugins.processorClasses().isEmpty(); // Use the optimized 'direct' implementation if it is available, and either there are no // annotation processors or they are built in to the tool and listed in // java_toolchain.header_compiler_direct_processors. 
boolean useHeaderCompilerDirect = javaToolchain.getHeaderCompilerDirect() != null && javaToolchain .getHeaderCompilerBuiltinProcessors() .containsAll(plugins.processorClasses().toSet()); SpawnAction.Builder builder = new SpawnAction.Builder(); builder.setEnvironment( ruleContext.getConfiguration().getActionEnvironment().addFixedVariables(UTF8_ENVIRONMENT)); builder.setProgressMessage( new ProgressMessage( /* prefix= */ "Compiling Java headers", /* output= */ outputJar, /* sourceFiles= */ sourceFiles, /* sourceJars= */ sourceJars, /* plugins= */ plugins)); builder.addTransitiveTools(toolsJars); builder.addOutput(outputJar); builder.addOutput(outputDepsProto); builder.addTransitiveInputs(additionalInputs); builder.addInputs(bootclasspathEntries); builder.addInputs(sourceJars); builder.addInputs(sourceFiles); FilesToRunProvider headerCompiler = useHeaderCompilerDirect ? javaToolchain.getHeaderCompilerDirect() : javaToolchain.getHeaderCompiler(); // The header compiler is either a jar file that needs to be executed using // `java -jar <path>`, or an executable that can be run directly. if (!headerCompiler.getExecutable().getExtension().equals("jar")) { builder.addRunfilesSupplier(headerCompiler.getRunfilesSupplier()); builder.addTransitiveInputs(headerCompiler.getFilesToRun()); builder.setExecutable(headerCompiler.getExecutable()); } else { builder .addTransitiveInputs(hostJavabase.javaBaseInputsMiddleman()) .addInput(headerCompiler.getExecutable()); builder.setExecutable(hostJavabase.javaBinaryExecPath()); builder .executableArguments() .add("-Xverify:none") .addAll(javaToolchain.getJvmOptions()) .add("-jar") .addExecPath(headerCompiler.getExecutable()) .build(); } CustomCommandLine.Builder commandLine = CustomCommandLine.builder() .addExecPath("--output", outputJar) .addExecPath("--output_deps", outputDepsProto) .addPath("--temp_dir", tempDirectory) .addExecPaths("--bootclasspath", bootclasspathEntries) .addExecPaths("--sources", sourceFiles) .addExecPaths("--source_jars", sourceJars) .add("--injecting_rule_kind", injectingRuleKind); if (!javacOpts.isEmpty()) { commandLine.addAll("--javacopts", javacOpts); // terminate --javacopts with `--` to support javac flags that start with `--` commandLine.add("--"); } if (targetLabel != null) { commandLine.add("--target_label"); if (targetLabel.getPackageIdentifier().getRepository().isDefault() || targetLabel.getPackageIdentifier().getRepository().isMain()) { commandLine.addLabel(targetLabel); } else { // @-prefixed strings will be assumed to be params filenames and expanded, // so add an extra @ to escape it. 
commandLine.addPrefixedLabel("@", targetLabel); } } JavaConfiguration javaConfiguration = ruleContext.getConfiguration().getFragment(JavaConfiguration.class); if (javaConfiguration.getReduceJavaClasspath() == JavaClasspathMode.BAZEL) { if (javaConfiguration.inmemoryJdepsFiles()) { builder.setExecutionInfo( ImmutableMap.of( ExecutionRequirements.REMOTE_EXECUTION_INLINE_OUTPUTS, outputDepsProto.getExecPathString())); } builder.addResultConsumer(createResultConsumer(outputDepsProto)); } if (useDirectClasspath) { NestedSet<Artifact> classpath; if (!directJars.isEmpty() || classpathEntries.isEmpty()) { classpath = directJars; } else { classpath = classpathEntries; } builder.addTransitiveInputs(classpath); commandLine.addExecPaths("--classpath", classpath); commandLine.add("--nojavac_fallback"); ruleContext.registerAction( builder .addCommandLine( commandLine.build(), ParamFileInfo.builder(ParameterFileType.UNQUOTED).build()) .setMnemonic("Turbine") .build(ruleContext)); return; } // If we get here the action requires annotation processing, so add additional inputs and // flags needed for the javac-based header compiler implementations that supports // annotation processing. builder.addTransitiveInputs(classpathEntries); if (!useHeaderCompilerDirect) { builder.addTransitiveInputs(plugins.processorClasspath()); builder.addTransitiveInputs(plugins.data()); } builder.addTransitiveInputs(compileTimeDependencyArtifacts); commandLine.addExecPaths("--classpath", classpathEntries); commandLine.addAll("--processors", plugins.processorClasses()); commandLine.addAll( "--builtin_processors", Sets.intersection( plugins.processorClasses().toSet(), javaToolchain.getHeaderCompilerBuiltinProcessors())); commandLine.addAll("--processors", plugins.processorClasses()); if (!useHeaderCompilerDirect) { commandLine.addExecPaths("--processorpath", plugins.processorClasspath()); } if (strictJavaDeps != StrictDepsMode.OFF) { commandLine.addExecPaths("--direct_dependencies", directJars); if (!compileTimeDependencyArtifacts.isEmpty()) { commandLine.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts); } } if (reduceClasspath && strictJavaDeps != StrictDepsMode.OFF) { commandLine.add("--reduce_classpath"); } else { commandLine.add("--noreduce_classpath"); } ruleContext.registerAction( builder .addCommandLine( commandLine.build(), ParamFileInfo.builder(ParameterFileType.UNQUOTED).setCharset(ISO_8859_1).build()) .setMnemonic("JavacTurbine") .build(ruleContext)); } /** * Creates a consumer that reads the produced .jdeps file into memory. Pulled out into a separate * function to avoid capturing a data member, which would keep the entire builder instance alive. */ private static Consumer<Pair<ActionExecutionContext, List<SpawnResult>>> createResultConsumer( Artifact outputDepsProto) { return contextAndResults -> { ActionExecutionContext context = contextAndResults.getFirst(); JavaCompileActionContext javaContext = context.getContext(JavaCompileActionContext.class); if (javaContext == null) { return; } SpawnResult spawnResult = Iterables.getOnlyElement(contextAndResults.getSecond()); try { InputStream inMemoryOutput = spawnResult.getInMemoryOutput(outputDepsProto); try (InputStream input = inMemoryOutput == null ? context.getInputPath(outputDepsProto).getInputStream() : inMemoryOutput) { javaContext.insertDependencies(outputDepsProto, Deps.Dependencies.parseFrom(input)); } } catch (IOException e) { // Left empty. If we cannot read the .jdeps file now, we will read it later or throw // an appropriate error then. 
} }; } }
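The builder above exposes a fluent API for registering the Turbine header-compilation action. A minimal, hypothetical sketch of a call site follows; the method name, its parameters, and the choice of StrictDepsMode.ERROR are assumptions about what an enclosing Java rule implementation would supply, and the types are the same ones already imported by the builder:

// Illustrative sketch only, not code from the Bazel tree; all arguments are
// assumed to be produced elsewhere in the enclosing Java rule implementation.
static void registerHeaderCompilation(
    RuleContext ruleContext,
    JavaToolchainProvider javaToolchain,
    JavaRuntimeInfo hostJavabase,
    Artifact headerJar,
    Artifact headerDeps,
    ImmutableSet<Artifact> sources,
    NestedSet<Artifact> classpath,
    NestedSet<Artifact> directJars,
    ImmutableList<Artifact> bootclasspath,
    ImmutableList<String> javacOpts,
    PathFragment tempDir) {
  new JavaHeaderCompileActionBuilder(ruleContext)
      .setOutputJar(headerJar)          // interface (header) jar consumed upstream
      .setOutputDepsProto(headerDeps)   // .jdeps output; build() requires it
      .setSourceFiles(sources)
      .setClasspathEntries(classpath)
      .setDirectJars(directJars)
      .setBootclasspathEntries(bootclasspath)
      .setJavacOpts(javacOpts)          // must be non-null, build() checks it
      .setTempDirectory(tempDir)
      .setStrictJavaDeps(StrictDepsMode.ERROR)
      .build(javaToolchain, hostJavabase);
}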
src/main/java/com/google/devtools/build/lib/rules/java/JavaHeaderCompileActionBuilder.java
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.devtools.build.lib.rules.java.JavaCompileActionBuilder.UTF8_ENVIRONMENT; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ExecutionRequirements; import com.google.devtools.build.lib.actions.ParamFileInfo; import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType; import com.google.devtools.build.lib.actions.SpawnResult; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.SpawnAction; import com.google.devtools.build.lib.analysis.config.CoreOptionConverters.StrictDepsMode; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.rules.java.JavaCompileAction.ProgressMessage; import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode; import com.google.devtools.build.lib.rules.java.JavaPluginInfoProvider.JavaPluginInfo; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.view.proto.Deps; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.function.Consumer; import javax.annotation.Nullable; /** * Action builder for Java header compilation, to be used if --java_header_compilation is enabled. * * <p>The header compiler consumes the inputs of a java compilation, and produces an interface jar * that can be used as a compile-time jar by upstream targets. The header interface jar is * equivalent to the output of ijar, but unlike ijar the header compiler operates directly on Java * source files instead post-processing the class outputs of the compilation. Compiling the * interface jar from source moves javac off the build's critical path. * * <p>The implementation of the header compiler tool can be found under {@code * //src/java_tools/buildjar/java/com/google/devtools/build/java/turbine}. 
*/ public class JavaHeaderCompileActionBuilder { private final RuleContext ruleContext; private Artifact outputJar; @Nullable private Artifact outputDepsProto; private ImmutableSet<Artifact> sourceFiles = ImmutableSet.of(); private ImmutableList<Artifact> sourceJars = ImmutableList.of(); private NestedSet<Artifact> classpathEntries = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of(); @Nullable private Label targetLabel; @Nullable private String injectingRuleKind; private PathFragment tempDirectory; private StrictDepsMode strictJavaDeps = StrictDepsMode.OFF; private boolean reduceClasspath = true; private NestedSet<Artifact> directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private NestedSet<Artifact> compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER); private ImmutableList<String> javacOpts; private JavaPluginInfo plugins = JavaPluginInfo.empty(); private NestedSet<Artifact> additionalInputs = NestedSetBuilder.emptySet(Order.STABLE_ORDER); private NestedSet<Artifact> toolsJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); public JavaHeaderCompileActionBuilder(RuleContext ruleContext) { this.ruleContext = ruleContext; } /** Sets the output jdeps file. */ public JavaHeaderCompileActionBuilder setOutputDepsProto(@Nullable Artifact outputDepsProto) { this.outputDepsProto = outputDepsProto; return this; } /** Sets the direct dependency artifacts. */ public JavaHeaderCompileActionBuilder setDirectJars(NestedSet<Artifact> directJars) { checkNotNull(directJars, "directJars must not be null"); this.directJars = directJars; return this; } /** Sets the .jdeps artifacts for direct dependencies. */ public JavaHeaderCompileActionBuilder setCompileTimeDependencyArtifacts( NestedSet<Artifact> dependencyArtifacts) { checkNotNull(dependencyArtifacts, "dependencyArtifacts must not be null"); this.compileTimeDependencyArtifacts = dependencyArtifacts; return this; } /** Sets Java compiler flags. */ public JavaHeaderCompileActionBuilder setJavacOpts(ImmutableList<String> javacOpts) { checkNotNull(javacOpts, "javacOpts must not be null"); this.javacOpts = javacOpts; return this; } /** Sets the output jar. */ public JavaHeaderCompileActionBuilder setOutputJar(Artifact outputJar) { checkNotNull(outputJar, "outputJar must not be null"); this.outputJar = outputJar; return this; } /** Adds Java source files to compile. */ public JavaHeaderCompileActionBuilder setSourceFiles(ImmutableSet<Artifact> sourceFiles) { checkNotNull(sourceFiles, "sourceFiles must not be null"); this.sourceFiles = sourceFiles; return this; } /** Adds a jar archive of Java sources to compile. */ public JavaHeaderCompileActionBuilder setSourceJars(ImmutableList<Artifact> sourceJars) { checkNotNull(sourceJars, "sourceJars must not be null"); this.sourceJars = sourceJars; return this; } /** Sets the compilation classpath entries. */ public JavaHeaderCompileActionBuilder setClasspathEntries(NestedSet<Artifact> classpathEntries) { checkNotNull(classpathEntries, "classpathEntries must not be null"); this.classpathEntries = classpathEntries; return this; } /** Sets the compilation bootclasspath entries. */ public JavaHeaderCompileActionBuilder setBootclasspathEntries( ImmutableList<Artifact> bootclasspathEntries) { checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null"); this.bootclasspathEntries = bootclasspathEntries; return this; } /** Sets the annotation processors classpath entries. 
*/ public JavaHeaderCompileActionBuilder setPlugins(JavaPluginInfo plugins) { checkNotNull(plugins, "plugins must not be null"); checkState(this.plugins.isEmpty()); this.plugins = plugins; return this; } /** Sets the label of the target being compiled. */ public JavaHeaderCompileActionBuilder setTargetLabel(@Nullable Label targetLabel) { this.targetLabel = targetLabel; return this; } /** Sets the injecting rule kind of the target being compiled. */ public JavaHeaderCompileActionBuilder setInjectingRuleKind(@Nullable String injectingRuleKind) { this.injectingRuleKind = injectingRuleKind; return this; } /** * Sets the path to a temporary directory, e.g. for extracting sourcejar entries to before * compilation. */ public JavaHeaderCompileActionBuilder setTempDirectory(PathFragment tempDirectory) { checkNotNull(tempDirectory, "tempDirectory must not be null"); this.tempDirectory = tempDirectory; return this; } /** Sets the Strict Java Deps mode. */ public JavaHeaderCompileActionBuilder setStrictJavaDeps(StrictDepsMode strictJavaDeps) { checkNotNull(strictJavaDeps, "strictJavaDeps must not be null"); this.strictJavaDeps = strictJavaDeps; return this; } /** Enables reduced classpaths. */ public JavaHeaderCompileActionBuilder setReduceClasspath(boolean reduceClasspath) { this.reduceClasspath = reduceClasspath; return this; } /** Sets the javabase inputs. */ public JavaHeaderCompileActionBuilder setAdditionalInputs(NestedSet<Artifact> additionalInputs) { checkNotNull(additionalInputs, "additionalInputs must not be null"); this.additionalInputs = additionalInputs; return this; } /** Sets the tools jars. */ public JavaHeaderCompileActionBuilder setToolsJars(NestedSet<Artifact> toolsJars) { checkNotNull(toolsJars, "toolsJars must not be null"); this.toolsJars = toolsJars; return this; } /** Builds and registers the action for a header compilation. */ public void build(JavaToolchainProvider javaToolchain, JavaRuntimeInfo hostJavabase) { checkNotNull(outputDepsProto, "outputDepsProto must not be null"); checkNotNull(sourceFiles, "sourceFiles must not be null"); checkNotNull(sourceJars, "sourceJars must not be null"); checkNotNull(classpathEntries, "classpathEntries must not be null"); checkNotNull(bootclasspathEntries, "bootclasspathEntries must not be null"); checkNotNull(tempDirectory, "tempDirectory must not be null"); checkNotNull(strictJavaDeps, "strictJavaDeps must not be null"); checkNotNull(directJars, "directJars must not be null"); checkNotNull(compileTimeDependencyArtifacts, "compileTimeDependencyArtifacts must not be null"); checkNotNull(javacOpts, "javacOpts must not be null"); // Invariant: if strictJavaDeps is OFF, then directJars and // dependencyArtifacts are ignored if (strictJavaDeps == StrictDepsMode.OFF) { directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER); } // Enable the direct classpath optimization if there are no annotation processors. // N.B. we only check if the processor classes are empty, we don't care if there is plugin // data or dependencies if there are no annotation processors to run. This differs from // javac where java_plugin may be used with processor_class unset to declare Error Prone // plugins. boolean useDirectClasspath = plugins.processorClasses().isEmpty(); // Use the optimized 'direct' implementation if it is available, and either there are no // annotation processors or they are built in to the tool and listed in // java_toolchain.header_compiler_direct_processors. 
boolean useHeaderCompilerDirect = javaToolchain.getHeaderCompilerDirect() != null && javaToolchain .getHeaderCompilerBuiltinProcessors() .containsAll(plugins.processorClasses().toSet()); SpawnAction.Builder builder = new SpawnAction.Builder(); builder.setEnvironment( ruleContext.getConfiguration().getActionEnvironment().addFixedVariables(UTF8_ENVIRONMENT)); builder.setProgressMessage( new ProgressMessage( /* prefix= */ "Compiling Java headers", /* output= */ outputJar, /* sourceFiles= */ sourceFiles, /* sourceJars= */ sourceJars, /* plugins= */ plugins)); builder.addTransitiveTools(toolsJars); builder.addOutput(outputJar); builder.addOutput(outputDepsProto); builder.addTransitiveInputs(additionalInputs); builder.addInputs(bootclasspathEntries); builder.addInputs(sourceJars); builder.addInputs(sourceFiles); FilesToRunProvider headerCompiler = useHeaderCompilerDirect ? javaToolchain.getHeaderCompilerDirect() : javaToolchain.getHeaderCompiler(); // The header compiler is either a jar file that needs to be executed using // `java -jar <path>`, or an executable that can be run directly. if (!headerCompiler.getExecutable().getExtension().equals("jar")) { builder.addRunfilesSupplier(headerCompiler.getRunfilesSupplier()); builder.addTransitiveInputs(headerCompiler.getFilesToRun()); builder.setExecutable(headerCompiler.getExecutable()); } else { builder .addTransitiveInputs(hostJavabase.javaBaseInputsMiddleman()) .addInput(headerCompiler.getExecutable()); builder.setExecutable(hostJavabase.javaBinaryExecPath()); builder .executableArguments() .add("-Xverify:none") .add("-jar") .addExecPath(headerCompiler.getExecutable()) .build(); } CustomCommandLine.Builder commandLine = CustomCommandLine.builder() .addExecPath("--output", outputJar) .addExecPath("--output_deps", outputDepsProto) .addPath("--temp_dir", tempDirectory) .addExecPaths("--bootclasspath", bootclasspathEntries) .addExecPaths("--sources", sourceFiles) .addExecPaths("--source_jars", sourceJars) .add("--injecting_rule_kind", injectingRuleKind); if (!javacOpts.isEmpty()) { commandLine.addAll("--javacopts", javacOpts); // terminate --javacopts with `--` to support javac flags that start with `--` commandLine.add("--"); } if (targetLabel != null) { commandLine.add("--target_label"); if (targetLabel.getPackageIdentifier().getRepository().isDefault() || targetLabel.getPackageIdentifier().getRepository().isMain()) { commandLine.addLabel(targetLabel); } else { // @-prefixed strings will be assumed to be params filenames and expanded, // so add an extra @ to escape it. 
        commandLine.addPrefixedLabel("@", targetLabel);
      }
    }

    JavaConfiguration javaConfiguration =
        ruleContext.getConfiguration().getFragment(JavaConfiguration.class);
    if (javaConfiguration.getReduceJavaClasspath() == JavaClasspathMode.BAZEL) {
      if (javaConfiguration.inmemoryJdepsFiles()) {
        builder.setExecutionInfo(
            ImmutableMap.of(
                ExecutionRequirements.REMOTE_EXECUTION_INLINE_OUTPUTS,
                outputDepsProto.getExecPathString()));
      }
      builder.addResultConsumer(createResultConsumer(outputDepsProto));
    }

    if (useDirectClasspath) {
      NestedSet<Artifact> classpath;
      if (!directJars.isEmpty() || classpathEntries.isEmpty()) {
        classpath = directJars;
      } else {
        classpath = classpathEntries;
      }
      builder.addTransitiveInputs(classpath);
      commandLine.addExecPaths("--classpath", classpath);
      commandLine.add("--nojavac_fallback");
      ruleContext.registerAction(
          builder
              .addCommandLine(
                  commandLine.build(), ParamFileInfo.builder(ParameterFileType.UNQUOTED).build())
              .setMnemonic("Turbine")
              .build(ruleContext));
      return;
    }

    // If we get here the action requires annotation processing, so add additional inputs and
    // flags needed for the javac-based header compiler implementations that support
    // annotation processing.
    builder.addTransitiveInputs(classpathEntries);
    if (!useHeaderCompilerDirect) {
      builder.addTransitiveInputs(plugins.processorClasspath());
      builder.addTransitiveInputs(plugins.data());
    }
    builder.addTransitiveInputs(compileTimeDependencyArtifacts);

    commandLine.addExecPaths("--classpath", classpathEntries);
    commandLine.addAll("--processors", plugins.processorClasses());
    commandLine.addAll(
        "--builtin_processors",
        Sets.intersection(
            plugins.processorClasses().toSet(),
            javaToolchain.getHeaderCompilerBuiltinProcessors()));
    if (!useHeaderCompilerDirect) {
      commandLine.addExecPaths("--processorpath", plugins.processorClasspath());
    }
    if (strictJavaDeps != StrictDepsMode.OFF) {
      commandLine.addExecPaths("--direct_dependencies", directJars);
      if (!compileTimeDependencyArtifacts.isEmpty()) {
        commandLine.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts);
      }
    }
    if (reduceClasspath && strictJavaDeps != StrictDepsMode.OFF) {
      commandLine.add("--reduce_classpath");
    } else {
      commandLine.add("--noreduce_classpath");
    }

    ruleContext.registerAction(
        builder
            .addCommandLine(
                commandLine.build(),
                ParamFileInfo.builder(ParameterFileType.UNQUOTED).setCharset(ISO_8859_1).build())
            .setMnemonic("JavacTurbine")
            .build(ruleContext));
  }

  /**
   * Creates a consumer that reads the produced .jdeps file into memory. Pulled out into a separate
   * function to avoid capturing a data member, which would keep the entire builder instance alive.
   */
  private static Consumer<Pair<ActionExecutionContext, List<SpawnResult>>> createResultConsumer(
      Artifact outputDepsProto) {
    return contextAndResults -> {
      ActionExecutionContext context = contextAndResults.getFirst();
      JavaCompileActionContext javaContext = context.getContext(JavaCompileActionContext.class);
      if (javaContext == null) {
        return;
      }
      SpawnResult spawnResult = Iterables.getOnlyElement(contextAndResults.getSecond());
      try {
        InputStream inMemoryOutput = spawnResult.getInMemoryOutput(outputDepsProto);
        try (InputStream input =
            inMemoryOutput == null
                ? context.getInputPath(outputDepsProto).getInputStream()
                : inMemoryOutput) {
          javaContext.insertDependencies(outputDepsProto, Deps.Dependencies.parseFrom(input));
        }
      } catch (IOException e) {
        // Left empty. If we cannot read the .jdeps file now, we will read it later or throw
        // an appropriate error then.
} }; } }
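The createResultConsumer helper just above prefers the spawn's in-memory .jdeps bytes and only falls back to reading the file from disk. A minimal, framework-free sketch of the same fallback pattern, assuming hypothetical InMemoryResult/outputPath names (JDK types only, no Bazel APIs):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Consumer;

final class JdepsFallbackSketch {

    /** Illustrative stand-in for a spawn result that may carry the output bytes in memory. */
    interface InMemoryResult {
        /** Returns the produced bytes, or null if the output only exists on disk. */
        byte[] inMemoryOutput();
        Path outputPath();
    }

    /** Builds a consumer that reads the output from memory when present, else from the file. */
    static Consumer<InMemoryResult> createResultConsumer(Consumer<InputStream> parser) {
        return result -> {
            byte[] inMemory = result.inMemoryOutput();
            try (InputStream input = inMemory != null
                    ? new ByteArrayInputStream(inMemory)
                    : Files.newInputStream(result.outputPath())) {
                parser.accept(input);
            } catch (IOException e) {
                // Mirror the original: ignore here and let a later read surface the error.
            }
        };
    }
}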
Reintroduce a step that was accidentally dropped in https://github.com/bazelbuild/bazel/commit/ea971cb3855cb394fcdd1f160ee95b263995685c PiperOrigin-RevId: 262640794
src/main/java/com/google/devtools/build/lib/rules/java/JavaHeaderCompileActionBuilder.java
Reintroduce a step that was accidentally dropped in https://github.com/bazelbuild/bazel/commit/ea971cb3855cb394fcdd1f160ee95b263995685c
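Among the steps the new file performs is passing --builtin_processors as the intersection of the requested annotation processors and the processors built into the header compiler; whether that is the reintroduced step is not stated here. A small sketch of that intersection with Guava, using made-up processor class names:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

final class BuiltinProcessorsSketch {
    public static void main(String[] args) {
        // Hypothetical processor class names, not taken from any real toolchain.
        Set<String> requestedProcessors =
            ImmutableSet.of("com.example.AutoValueProcessor", "com.example.DaggerProcessor");
        Set<String> builtinProcessors =
            ImmutableSet.of("com.example.DaggerProcessor");

        // Same shape as the commandLine.addAll("--builtin_processors", Sets.intersection(...)) call above.
        Set<String> builtinToRun = Sets.intersection(requestedProcessors, builtinProcessors);
        System.out.println("--builtin_processors " + String.join(",", builtinToRun));
    }
}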
Java
apache-2.0
b7d7cf24cd6515c0c54bed01d0747bf6cbf64700
0
skjolber/3d-bin-container-packing,skjolber/3d-bin-container-packing,skjolber/3d-bin-container-packing,skjolber/3d-bin-container-packing
package com.github.skjolberg.packing;

import net.jqwik.api.*;
import net.jqwik.api.arbitraries.IntegerArbitrary;
import net.jqwik.api.constraints.IntRange;
import net.jqwik.api.constraints.Size;

import java.util.List;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.util.Collections.singletonList;
import static net.jqwik.api.Arbitraries.integers;
import static org.assertj.core.api.Assertions.assertThat;

class BruteForcePropertyBasedTests {

    // The maximum number of different, random items which can be reliably packed with brute force
    // seems to be between 10 and 15
    @Property
    void bunchOfDifferentBoxesShouldFitInContainers(@ForAll @Size(min = 1, max = 11) List<BoxItem> items) {
        final Box empty = new Box(0, 0, 0, 0);
        final List<Container> containers = Stream.of(accumulateByDepth, accumulateByWidth, accumulateByHeight)
            .map(accumulator -> largeEnoughContainer(items, empty, accumulator))
            .collect(Collectors.toList());
        // only useful to debug when packaging fails
        System.out.printf("packing %d items into %s%n", items.size(), containers);
        System.out.println(items);
        final Container pack = new BruteForcePackager(containers).pack(items, System.currentTimeMillis() + 300);
        assertThat(pack).isNotNull();
    }

    @Property
    void identicalBoxesShouldFitInContainers(@ForAll BoxItem item,
                                             @ForAll @IntRange(min = 1, max = 10) int countBySide) {
        final int totalCount = countBySide * countBySide * countBySide;
        final BoxItem repeatedItems = new BoxItem(item.getBox(), totalCount);
        //TODO: we could also randomly rotate the items
        final List<Container> containers = largeEnoughContainers(item, countBySide);
        final Container pack = new BruteForcePackager(containers).pack(singletonList(repeatedItems), System.currentTimeMillis() + 300);
        assertThat(pack).isNotNull();
    }

    /**
     * Prepare containers which are just the right size for the items by stacking them on 1, 2 or 3 directions.
     */
    private List<Container> largeEnoughContainers(final BoxItem item, final int countBySide) {
        final int totalCount = countBySide * countBySide * countBySide;
        final Box box = item.getBox();
        Container threeDim = new Container(
            box.getWidth() * countBySide,
            box.getDepth() * countBySide,
            box.getHeight() * countBySide,
            box.getWeight() * totalCount);
        Container twoDim = new Container(
            box.getWidth() * countBySide * countBySide,
            box.getDepth() * countBySide,
            box.getHeight(),
            box.getWeight() * totalCount);
        Container oneDim = new Container(
            box.getWidth() * countBySide * countBySide * countBySide,
            box.getDepth(),
            box.getHeight(),
            box.getWeight() * totalCount);
        return Stream
            .of(threeDim, twoDim, oneDim)
            .flatMap(this::rotations)
            .collect(Collectors.toList());
    }

    /**
     * The 6 different possible rotations of a container.
*/ private Stream<Container> rotations(final Container container) { final int width = container.getWidth(); final int height = container.getHeight(); final int depth = container.getDepth(); final int weight = container.getWeight(); return Stream.of( new Container(width, height, depth, weight), new Container(width, depth, height, weight), new Container(height, width, depth, weight), new Container(height, depth, width, weight), new Container(depth, height, width, weight), new Container(depth, width, height, weight)); } private Container largeEnoughContainer(final List<BoxItem> items, final Box empty, final BiFunction<Box, BoxItem, Box> accumulator) { final Box largeEnough = items.stream().reduce(empty, accumulator, BruteForcePropertyBasedTests::add); return new Container(largeEnough, largeEnough.getWeight()); } private BiFunction<Box, BoxItem, Box> accumulateByDepth = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth(), acc.getHeight() + boxItem.getBox().getHeight(), acc.getDepth() + boxItem.getBox().getDepth() * boxItem.getCount(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private BiFunction<Box, BoxItem, Box> accumulateByWidth = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth() * boxItem.getCount(), acc.getHeight() + boxItem.getBox().getHeight(), acc.getDepth() + boxItem.getBox().getDepth(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private BiFunction<Box, BoxItem, Box> accumulateByHeight = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth(), acc.getHeight() + boxItem.getBox().getHeight() * boxItem.getCount(), acc.getDepth() + boxItem.getBox().getDepth(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private static Box add(Box b1, Box b2) { return new Box( b1.getWidth() + b2.getWidth(), b1.getDepth() + b2.getDepth(), b1.getHeight() + b2.getHeight(), b1.getWeight() + b2.getWeight()); } @Provide Arbitrary<Dimension> dimensionGenerated() { return Combinators .combine(sensiblePositiveNumber(), sensiblePositiveNumber(), sensiblePositiveNumber()) .as(Dimension::new); } @Provide Arbitrary<BoxItem> boxItemGenerated() { return Combinators .combine(sensiblePositiveNumber(), dimensionGenerated(), integers().between(1, 1)) .as((weight, dimension, count) -> new BoxItem(new Box(dimension, weight), count)); } private IntegerArbitrary sensiblePositiveNumber() { return integers().between(1, 9999); } }
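The rotations helper above enumerates the six axis permutations of a container. The same enumeration with plain int triples, independent of the packing API (the dimension values are arbitrary):

import java.util.Arrays;
import java.util.List;

final class RotationsSketch {
    /** Returns the six (width, height, depth) permutations of one dimension triple. */
    static List<int[]> rotations(int width, int height, int depth) {
        return Arrays.asList(
            new int[] {width, height, depth},
            new int[] {width, depth, height},
            new int[] {height, width, depth},
            new int[] {height, depth, width},
            new int[] {depth, height, width},
            new int[] {depth, width, height});
    }

    public static void main(String[] args) {
        // A 1x2x3 container yields 123, 132, 213, 231, 321, 312.
        rotations(1, 2, 3).forEach(r -> System.out.println(Arrays.toString(r)));
    }
}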
src/test/java/com/github/skjolberg/packing/BruteForcePropertyBasedTests.java
package com.github.skjolberg.packing; import net.jqwik.api.*; import net.jqwik.api.arbitraries.IntegerArbitrary; import net.jqwik.api.constraints.Size; import java.util.List; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.Stream; import static net.jqwik.api.Arbitraries.integers; import static org.assertj.core.api.Assertions.assertThat; class BruteForcePropertyBasedTests { // The maximum number of different, random items which can be reliably packed with brute force // seems to be between 10 and 15 @Property void bunchOfDifferentBoxesShouldFitInContainers(@ForAll @Size(min = 1, max = 11) List<BoxItem> items) { final Box empty = new Box(0, 0, 0, 0); final List<Container> containers = Stream.of(accumulateByDepth, accumulateByWidth, accumulateByHeight) .map(accumulator -> largeEnoughContainer(items, empty, accumulator)) .collect(Collectors.toList()); // only useful to debug when packaging fails System.out.printf("packing %d items into %s%n", items.size(), containers); System.out.println(items); final Container pack = new BruteForcePackager(containers).pack(items, System.currentTimeMillis() + 300); assertThat(pack).isNotNull(); } private Container largeEnoughContainer(final List<BoxItem> items, final Box empty, final BiFunction<Box, BoxItem, Box> accumulator) { final Box largeEnough = items.stream().reduce(empty, accumulator, BruteForcePropertyBasedTests::add); return new Container(largeEnough, largeEnough.getWeight()); } private BiFunction<Box, BoxItem, Box> accumulateByDepth = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth(), acc.getHeight() + boxItem.getBox().getHeight(), acc.getDepth() + boxItem.getBox().getDepth() * boxItem.getCount(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private BiFunction<Box, BoxItem, Box> accumulateByWidth = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth() * boxItem.getCount(), acc.getHeight() + boxItem.getBox().getHeight(), acc.getDepth() + boxItem.getBox().getDepth(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private BiFunction<Box, BoxItem, Box> accumulateByHeight = (acc, boxItem) -> new Box( acc.getWidth() + boxItem.getBox().getWidth(), acc.getHeight() + boxItem.getBox().getHeight() * boxItem.getCount(), acc.getDepth() + boxItem.getBox().getDepth(), acc.getWeight() + boxItem.getBox().getWeight() * boxItem.getCount()); private static Box add(Box b1, Box b2) { return new Box( b1.getWidth() + b2.getWidth(), b1.getDepth() + b2.getDepth(), b1.getHeight() + b2.getHeight(), b1.getWeight() + b2.getWeight()); } @Provide Arbitrary<Dimension> dimensionGenerated() { return Combinators .combine(sensiblePositiveNumber(), sensiblePositiveNumber(), sensiblePositiveNumber()) .as(Dimension::new); } @Provide Arbitrary<BoxItem> boxItemGenerated() { return Combinators .combine(sensiblePositiveNumber(), dimensionGenerated(), integers().between(1, 1)) .as((weight, dimension, count) -> new BoxItem(new Box(dimension, weight), count)); } private IntegerArbitrary sensiblePositiveNumber() { return integers().between(1, 9999); } }
test packing several identical boxes in just-fitting containers
src/test/java/com/github/skjolberg/packing/BruteForcePropertyBasedTests.java
test packing several identical boxes in just-fitting containers
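For the identical-box property this subject describes, the "just fitting" containers are sized by multiplying the box dimensions by the per-side count, as largeEnoughContainers() in the new test does. A quick arithmetic sketch with a hypothetical 3x4x5 box:

final class JustFitSketch {
    public static void main(String[] args) {
        int countBySide = 2;                 // 2 x 2 x 2 = 8 identical boxes
        int w = 3, d = 4, h = 5, weight = 6; // hypothetical box, not from the test data
        int totalCount = countBySide * countBySide * countBySide;

        // Stack in three directions, as the threeDim container above does:
        System.out.printf("3D container: %d x %d x %d, max weight %d%n",
            w * countBySide, d * countBySide, h * countBySide, weight * totalCount);
        // Stack in one direction only, as the oneDim container above does:
        System.out.printf("1D container: %d x %d x %d, max weight %d%n",
            w * totalCount, d, h, weight * totalCount);
    }
}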
Java
apache-2.0
dfbec28c8aed1b6a089dfab55263a83f58ce62d7
0
jior/glaf,jior/glaf,jior/glaf,jior/glaf,jior/glaf,jior/glaf
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.glaf.core.config; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.SQLException; import java.util.*; import javax.naming.InitialContext; import javax.sql.DataSource; import org.apache.commons.dbcp.BasicDataSource; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; import com.glaf.core.config.SystemProperties; import com.glaf.core.context.ContextFactory; import com.glaf.core.util.Constants; import com.glaf.core.util.DBUtils; import com.glaf.core.util.PropertiesUtils; public class DataSourceConfig { protected static String databaseType; protected static Properties databaseTypeMappings = getDefaultDatabaseTypeMappings(); protected static Properties dialetTypeMappings = getDialectMappings(); protected static Properties hibernateDialetTypeMappings = getHibernateDialectMappings(); protected static Properties properties = new Properties(); protected static boolean loadJdbcProperties = false; static { try { reload(); } catch (Exception ex) { } } public static boolean checkConnection() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } if (connection != null) { return true; } } catch (Exception ex) { ex.printStackTrace(); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { e.printStackTrace(); } } return false; } public static boolean eq(String key, String value) { if (key != null && value != null) { String x = properties.getProperty(key); if (StringUtils.equals(value, x)) { return true; } } return false; } public static boolean getBoolean(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Boolean.valueOf(value).booleanValue(); } return false; } public static Connection getConnection() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = 
ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } } catch (Exception ex) { ex.printStackTrace(); } return connection; } public static Connection getConnection(Properties props) { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(props .getProperty(Environment.DATASOURCE))) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(props .getProperty(Environment.DATASOURCE)); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(props .getProperty(Environment.DRIVER)); bds.setUrl(props.getProperty(Environment.URL)); bds.setUsername(props.getProperty(Environment.USER)); bds.setPassword(props.getProperty(Environment.PASS)); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } } catch (Exception ex) { ex.printStackTrace(); } return connection; } public static String getDatabaseDialect() { if (getDatabaseType() != null) { return dialetTypeMappings.getProperty(getDatabaseType()); } return null; } public static String getDatabaseType() { if (databaseType == null) { initDatabaseType(); } return databaseType; } public static String getDatabaseType(Connection connection) { if (connection != null) { String databaseProductName = null; try { DatabaseMetaData databaseMetaData = connection.getMetaData(); databaseProductName = databaseMetaData.getDatabaseProductName(); } catch (SQLException ex) { ex.printStackTrace(); throw new RuntimeException(ex); } String dbType = databaseTypeMappings .getProperty(databaseProductName); if (dbType == null) { throw new RuntimeException( "couldn't deduct database type from database product name '" + databaseProductName + "'"); } return dbType; } return null; } protected static Properties getDefaultDatabaseTypeMappings() { Properties databaseTypeMappings = new Properties(); databaseTypeMappings.setProperty("H2", "h2"); databaseTypeMappings.setProperty("MySQL", "mysql"); databaseTypeMappings.setProperty("Oracle", "oracle"); databaseTypeMappings.setProperty("PostgreSQL", "postgresql"); databaseTypeMappings.setProperty("Microsoft SQL Server", "sqlserver"); databaseTypeMappings.setProperty("DB2", "db2"); databaseTypeMappings.setProperty("DB2/NT", "db2"); databaseTypeMappings.setProperty("DB2/NT64", "db2"); databaseTypeMappings.setProperty("DB2 UDP", "db2"); databaseTypeMappings.setProperty("DB2/LINUX", "db2"); databaseTypeMappings.setProperty("DB2/LINUX390", "db2"); databaseTypeMappings.setProperty("DB2/LINUXZ64", "db2"); databaseTypeMappings.setProperty("DB2/400 SQL", "db2"); databaseTypeMappings.setProperty("DB2/6000", "db2"); databaseTypeMappings.setProperty("DB2 UDB iSeries", "db2"); databaseTypeMappings.setProperty("DB2/AIX64", "db2"); databaseTypeMappings.setProperty("DB2/HPUX", "db2"); databaseTypeMappings.setProperty("DB2/HP64", "db2"); databaseTypeMappings.setProperty("DB2/SUN", "db2"); databaseTypeMappings.setProperty("DB2/SUN64", "db2"); databaseTypeMappings.setProperty("DB2/PTX", "db2"); databaseTypeMappings.setProperty("DB2/2", "db2"); return databaseTypeMappings; } protected static Properties getDialectMappings() { Properties dialectMappings = new Properties(); 
dialectMappings.setProperty("h2", "com.glaf.core.dialect.H2Dialect"); dialectMappings.setProperty("mysql", "com.glaf.core.dialect.MySQLDialect"); dialectMappings.setProperty("oracle", "com.glaf.core.dialect.OracleDialect"); dialectMappings.setProperty("postgresql", "com.glaf.core.dialect.PostgreSQLDialect"); dialectMappings.setProperty("sqlserver", "com.glaf.core.dialect.SQLServerDialect"); dialectMappings.setProperty("db2", "com.glaf.core.dialect.DB2Dialect"); return dialectMappings; } public static double getDouble(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Double.valueOf(value).doubleValue(); } return 0; } public static String getHibernateDialect() { if (getDatabaseType() != null) { return hibernateDialetTypeMappings.getProperty(getDatabaseType()); } return null; } protected static Properties getHibernateDialectMappings() { Properties dialectMappings = new Properties(); dialectMappings.setProperty("h2", "org.hibernate.dialect.H2Dialect"); dialectMappings.setProperty("mysql", "org.hibernate.dialect.MySQL5Dialect"); dialectMappings.setProperty("oracle", "org.hibernate.dialect.Oracle10gDialect"); dialectMappings.setProperty("postgresql", "org.hibernate.dialect.PostgreSQLDialect"); dialectMappings.setProperty("sqlserver", "org.hibernate.dialect.SQLServerDialect"); dialectMappings.setProperty("db2", "org.hibernate.dialect.DB2Dialect"); return dialectMappings; } public static int getInt(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Integer.valueOf(value).intValue(); } return 0; } public static String getJdbcConnectionURL() { return getString(Environment.URL); } public static String getJdbcDriverClass() { return getString(Environment.DRIVER); } public static String getJdbcPassword() { return getString(Environment.PASS); } public static String getJdbcUsername() { return getString(Environment.USER); } public static String getJndiName() { return getString(Environment.DATASOURCE); } public static long getLong(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Long.valueOf(value).longValue(); } return 0; } public static Properties getProperties() { return properties; } public static String getString(String key) { if (hasObject(key)) { String value = properties.getProperty(key); if (value == null) { value = properties.getProperty(key.toUpperCase()); } return value; } return null; } public static boolean hasObject(String key) { if (properties == null || key == null) { return false; } String value = properties.getProperty(key); if (value != null) { return true; } return false; } public static void initDatabaseType() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } if (connection != null) { DatabaseMetaData databaseMetaData = connection.getMetaData(); String databaseProductName = databaseMetaData .getDatabaseProductName(); System.out.println("database product name: '" + databaseProductName + "'"); databaseType = databaseTypeMappings 
.getProperty(databaseProductName); if (databaseType == null) { throw new RuntimeException( "couldn't deduct database type from database product name '" + databaseProductName + "'"); } System.out.println("using database type: " + databaseType); if (SystemProperties.getBoolean("hibernate.cfg.update")) { reconfigHibernate(); } } } catch (Exception ex) { ex.printStackTrace(); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { e.printStackTrace(); } } } public static boolean isBooleanDatabase() { String databaseType = getDatabaseType(); if (DBUtils.POSTGRESQL.equalsIgnoreCase(databaseType)) { return true; } return false; } protected static void reconfigHibernate() { Resource resource = new ClassPathResource("/hibernate.properties"); try { Properties p = PropertiesUtils.loadProperties(resource .getInputStream()); if (StringUtils.isNotEmpty(getJndiName())) { p.put("hibernate.connection.datasource", getJndiName()); p.remove("hibernate.connection.driver_class"); p.remove("hibernate.connection.url"); p.remove("hibernate.connection.username"); p.remove("hibernate.connection.password"); p.remove("hibernate.connection.provider_class"); p.remove("hibernate.connection.pool_size"); p.remove("hibernate.connection.autocommit"); p.remove("hibernate.c3p0.max_size"); p.remove("hibernate.c3p0.min_size"); p.remove("hibernate.c3p0.timeout"); p.remove("hibernate.c3p0.max_statements"); p.remove("hibernate.c3p0.acquire_increment"); p.remove("hibernate.c3p0.idle_test_period"); p.remove("hibernate.c3p0.validate"); } else { if (getJdbcDriverClass() != null) { p.put("hibernate.connection.driver_class", getJdbcDriverClass()); } if (getJdbcConnectionURL() != null) { p.put("hibernate.connection.url", getJdbcConnectionURL()); } if (getJdbcUsername() != null) { p.put("hibernate.connection.username", getJdbcUsername()); } if (getJdbcPassword() != null) { p.put("hibernate.connection.password", getJdbcPassword()); } else { p.put("hibernate.connection.password", ""); } p.put("hibernate.connection.provider_class", "org.hibernate.connection.C3P0ConnectionProvider"); p.put("hibernate.connection.autocommit", "true"); p.put("hibernate.c3p0.max_size", "50"); p.put("hibernate.c3p0.min_size", "5"); p.put("hibernate.c3p0.timeout", "5000"); p.put("hibernate.c3p0.max_statements", "100"); p.put("hibernate.c3p0.acquire_increment", "2"); p.put("hibernate.c3p0.idle_test_period", "3000"); p.put("hibernate.c3p0.validate", "false"); p.remove("hibernate.connection.datasource"); } if (getHibernateDialect() != null) { p.put("hibernate.dialect", getHibernateDialect()); } Map<String, String> treeMap = new TreeMap<String, String>(); Set<String> keys = p.stringPropertyNames(); for (String key : keys) { treeMap.put(key, p.getProperty(key)); } PropertiesUtils.save(resource, treeMap); } catch (IOException ex) { System.out.println("hibernate.propertiesļ"); } } public static void reload() { synchronized (DataSourceConfig.class) { InputStream inputStream = null; try { String filename = SystemProperties.getConfigRootPath() + Constants.DEFAULT_JDBC_CONFIG; Resource resource = new FileSystemResource(filename); inputStream = new FileInputStream(resource.getFile() .getAbsolutePath()); properties.clear(); Properties p = PropertiesUtils.loadProperties(inputStream); if (p != null) { Enumeration<?> e = p.keys(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); String value = p.getProperty(key); properties.setProperty(key, value); } } inputStream.close(); inputStream = null; if 
(StringUtils.isNotEmpty(getJdbcConnectionURL())) { if (StringUtils .contains(getJdbcConnectionURL(), "jdbc:h2:")) { databaseType = "h2"; } else if (StringUtils.contains(getJdbcConnectionURL(), "jdbc:oracle:")) { databaseType = "oracle"; } else if (StringUtils.contains(getJdbcConnectionURL(), "jdbc:postgresql:")) { databaseType = "postgresql"; } else if (StringUtils.contains(getJdbcConnectionURL(), "jdbc:db2:")) { databaseType = "db2"; } else if (StringUtils.contains(getJdbcConnectionURL(), "jdbc:sqlserver:")) { databaseType = "sqlserver"; } else if (StringUtils.contains(getJdbcConnectionURL(), "jdbc:mysql:")) { databaseType = "mysql"; } } else { if (properties.getProperty("type") != null) { databaseType = properties.getProperty("type"); } } loadJdbcProperties = true; } catch (IOException ex) { loadJdbcProperties = false; } finally { IOUtils.closeQuietly(inputStream); } } } private DataSourceConfig() { } }
workspace/glaf-core/src/main/java/com/glaf/core/config/DataSourceConfig.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.glaf.core.config; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.SQLException; import java.util.*; import javax.naming.InitialContext; import javax.sql.DataSource; import org.apache.commons.dbcp.BasicDataSource; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.Resource; import com.glaf.core.config.SystemProperties; import com.glaf.core.context.ContextFactory; import com.glaf.core.util.Constants; import com.glaf.core.util.DBUtils; import com.glaf.core.util.PropertiesUtils; public class DataSourceConfig { protected static String databaseType; protected static Properties databaseTypeMappings = getDefaultDatabaseTypeMappings(); protected static Properties dialetTypeMappings = getDialectMappings(); protected static Properties hibernateDialetTypeMappings = getHibernateDialectMappings(); protected static Properties properties = new Properties(); protected static boolean loadJdbcProperties = false; static { try { reload(); } catch (Exception ex) { } } public static boolean checkConnection() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } if (connection != null) { return true; } } catch (Exception ex) { ex.printStackTrace(); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { e.printStackTrace(); } } return false; } public static boolean eq(String key, String value) { if (key != null && value != null) { String x = properties.getProperty(key); if (StringUtils.equals(value, x)) { return true; } } return false; } public static boolean getBoolean(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Boolean.valueOf(value).booleanValue(); } return false; } public static Connection getConnection() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = 
ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } } catch (Exception ex) { ex.printStackTrace(); } return connection; } public static Connection getConnection(Properties props) { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(props .getProperty(Environment.DATASOURCE))) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(props .getProperty(Environment.DATASOURCE)); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(props .getProperty(Environment.DRIVER)); bds.setUrl(props.getProperty(Environment.URL)); bds.setUsername(props.getProperty(Environment.USER)); bds.setPassword(props.getProperty(Environment.PASS)); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } } catch (Exception ex) { ex.printStackTrace(); } return connection; } public static String getDatabaseDialect() { if (getDatabaseType() != null) { return dialetTypeMappings.getProperty(getDatabaseType()); } return null; } public static String getDatabaseType() { if (databaseType == null) { initDatabaseType(); } return databaseType; } public static String getDatabaseType(Connection connection) { if (connection != null) { String databaseProductName = null; try { DatabaseMetaData databaseMetaData = connection.getMetaData(); databaseProductName = databaseMetaData.getDatabaseProductName(); } catch (SQLException ex) { ex.printStackTrace(); throw new RuntimeException(ex); } String dbType = databaseTypeMappings .getProperty(databaseProductName); if (dbType == null) { throw new RuntimeException( "couldn't deduct database type from database product name '" + databaseProductName + "'"); } return dbType; } return null; } protected static Properties getDefaultDatabaseTypeMappings() { Properties databaseTypeMappings = new Properties(); databaseTypeMappings.setProperty("H2", "h2"); databaseTypeMappings.setProperty("MySQL", "mysql"); databaseTypeMappings.setProperty("Oracle", "oracle"); databaseTypeMappings.setProperty("PostgreSQL", "postgresql"); databaseTypeMappings.setProperty("Microsoft SQL Server", "sqlserver"); databaseTypeMappings.setProperty("DB2", "db2"); databaseTypeMappings.setProperty("DB2/NT", "db2"); databaseTypeMappings.setProperty("DB2/NT64", "db2"); databaseTypeMappings.setProperty("DB2 UDP", "db2"); databaseTypeMappings.setProperty("DB2/LINUX", "db2"); databaseTypeMappings.setProperty("DB2/LINUX390", "db2"); databaseTypeMappings.setProperty("DB2/LINUXZ64", "db2"); databaseTypeMappings.setProperty("DB2/400 SQL", "db2"); databaseTypeMappings.setProperty("DB2/6000", "db2"); databaseTypeMappings.setProperty("DB2 UDB iSeries", "db2"); databaseTypeMappings.setProperty("DB2/AIX64", "db2"); databaseTypeMappings.setProperty("DB2/HPUX", "db2"); databaseTypeMappings.setProperty("DB2/HP64", "db2"); databaseTypeMappings.setProperty("DB2/SUN", "db2"); databaseTypeMappings.setProperty("DB2/SUN64", "db2"); databaseTypeMappings.setProperty("DB2/PTX", "db2"); databaseTypeMappings.setProperty("DB2/2", "db2"); return databaseTypeMappings; } protected static Properties getDialectMappings() { Properties dialectMappings = new Properties(); 
dialectMappings.setProperty("h2", "com.glaf.core.dialect.H2Dialect"); dialectMappings.setProperty("mysql", "com.glaf.core.dialect.MySQLDialect"); dialectMappings.setProperty("oracle", "com.glaf.core.dialect.OracleDialect"); dialectMappings.setProperty("postgresql", "com.glaf.core.dialect.PostgreSQLDialect"); dialectMappings.setProperty("sqlserver", "com.glaf.core.dialect.SQLServerDialect"); dialectMappings.setProperty("db2", "com.glaf.core.dialect.DB2Dialect"); return dialectMappings; } public static double getDouble(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Double.valueOf(value).doubleValue(); } return 0; } public static String getHibernateDialect() { if (getDatabaseType() != null) { return hibernateDialetTypeMappings.getProperty(getDatabaseType()); } return null; } protected static Properties getHibernateDialectMappings() { Properties dialectMappings = new Properties(); dialectMappings.setProperty("h2", "org.hibernate.dialect.H2Dialect"); dialectMappings.setProperty("mysql", "org.hibernate.dialect.MySQL5Dialect"); dialectMappings.setProperty("oracle", "org.hibernate.dialect.Oracle10gDialect"); dialectMappings.setProperty("postgresql", "org.hibernate.dialect.PostgreSQLDialect"); dialectMappings.setProperty("sqlserver", "org.hibernate.dialect.SQLServerDialect"); dialectMappings.setProperty("db2", "org.hibernate.dialect.DB2Dialect"); return dialectMappings; } public static int getInt(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Integer.valueOf(value).intValue(); } return 0; } public static String getJdbcConnectionURL() { return getString(Environment.URL); } public static String getJdbcDriverClass() { return getString(Environment.DRIVER); } public static String getJdbcPassword() { return getString(Environment.PASS); } public static String getJdbcUsername() { return getString(Environment.USER); } public static String getJndiName() { return getString(Environment.DATASOURCE); } public static long getLong(String key) { if (hasObject(key)) { String value = properties.getProperty(key); return Long.valueOf(value).longValue(); } return 0; } public static Properties getProperties() { return properties; } public static String getString(String key) { if (hasObject(key)) { String value = properties.getProperty(key); if (value == null) { value = properties.getProperty(key.toUpperCase()); } return value; } return null; } public static boolean hasObject(String key) { if (properties == null || key == null) { return false; } String value = properties.getProperty(key); if (value != null) { return true; } return false; } public static void initDatabaseType() { Connection connection = null; DataSource ds = null; try { if (loadJdbcProperties) { if (StringUtils.isNotEmpty(getJndiName())) { InitialContext ctx = new InitialContext(); ds = (DataSource) ctx.lookup(getJndiName()); connection = ds.getConnection(); } else { BasicDataSource bds = new BasicDataSource(); bds.setDriverClassName(getJdbcDriverClass()); bds.setUrl(getJdbcConnectionURL()); bds.setUsername(getJdbcUsername()); bds.setPassword(getJdbcPassword()); connection = bds.getConnection(); } } else { ds = ContextFactory.getBean("dataSource"); connection = ds.getConnection(); } if (connection != null) { DatabaseMetaData databaseMetaData = connection.getMetaData(); String databaseProductName = databaseMetaData .getDatabaseProductName(); System.out.println("database product name: '" + databaseProductName + "'"); databaseType = databaseTypeMappings 
.getProperty(databaseProductName); if (databaseType == null) { throw new RuntimeException( "couldn't deduct database type from database product name '" + databaseProductName + "'"); } System.out.println("using database type: " + databaseType); if (SystemProperties.getBoolean("hibernate.cfg.update")) { reconfigHibernate(); } } } catch (Exception ex) { ex.printStackTrace(); } finally { try { if (connection != null) { connection.close(); } } catch (SQLException e) { e.printStackTrace(); } } } public static boolean isBooleanDatabase() { String databaseType = getDatabaseType(); if (DBUtils.POSTGRESQL.equalsIgnoreCase(databaseType)) { return true; } return false; } protected static void reconfigHibernate() { Resource resource = new ClassPathResource("/hibernate.properties"); try { Properties p = PropertiesUtils.loadProperties(resource .getInputStream()); if (StringUtils.isNotEmpty(getJndiName())) { p.put("hibernate.connection.datasource", getJndiName()); p.remove("hibernate.connection.driver_class"); p.remove("hibernate.connection.url"); p.remove("hibernate.connection.username"); p.remove("hibernate.connection.password"); p.remove("hibernate.connection.provider_class"); p.remove("hibernate.connection.pool_size"); p.remove("hibernate.connection.autocommit"); p.remove("hibernate.c3p0.max_size"); p.remove("hibernate.c3p0.min_size"); p.remove("hibernate.c3p0.timeout"); p.remove("hibernate.c3p0.max_statements"); p.remove("hibernate.c3p0.acquire_increment"); p.remove("hibernate.c3p0.idle_test_period"); p.remove("hibernate.c3p0.validate"); } else { if (getJdbcDriverClass() != null) { p.put("hibernate.connection.driver_class", getJdbcDriverClass()); } if (getJdbcConnectionURL() != null) { p.put("hibernate.connection.url", getJdbcConnectionURL()); } if (getJdbcUsername() != null) { p.put("hibernate.connection.username", getJdbcUsername()); } if (getJdbcPassword() != null) { p.put("hibernate.connection.password", getJdbcPassword()); } else { p.put("hibernate.connection.password", ""); } p.put("hibernate.connection.provider_class", "org.hibernate.connection.C3P0ConnectionProvider"); p.put("hibernate.connection.autocommit", "true"); p.put("hibernate.c3p0.max_size", "50"); p.put("hibernate.c3p0.min_size", "5"); p.put("hibernate.c3p0.timeout", "5000"); p.put("hibernate.c3p0.max_statements", "100"); p.put("hibernate.c3p0.acquire_increment", "2"); p.put("hibernate.c3p0.idle_test_period", "3000"); p.put("hibernate.c3p0.validate", "false"); p.remove("hibernate.connection.datasource"); } if (getHibernateDialect() != null) { p.put("hibernate.dialect", getHibernateDialect()); } Map<String, String> treeMap = new TreeMap<String, String>(); Set<String> keys = p.stringPropertyNames(); for (String key : keys) { treeMap.put(key, p.getProperty(key)); } PropertiesUtils.save(resource, treeMap); } catch (IOException ex) { System.out.println("hibernate.propertiesļ"); } } public static void reload() { synchronized (DataSourceConfig.class) { InputStream inputStream = null; try { String filename = SystemProperties.getConfigRootPath() + Constants.DEFAULT_JDBC_CONFIG; Resource resource = new FileSystemResource(filename); inputStream = new FileInputStream(resource.getFile() .getAbsolutePath()); properties.clear(); Properties p = PropertiesUtils.loadProperties(inputStream); if (p != null) { Enumeration<?> e = p.keys(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); String value = p.getProperty(key); properties.setProperty(key, value); } } inputStream.close(); inputStream = null; if 
(properties.getProperty("type") != null) { databaseType = properties.getProperty("type"); } loadJdbcProperties = true; } catch (IOException ex) { loadJdbcProperties = false; } finally { IOUtils.closeQuietly(inputStream); } } } private DataSourceConfig() { } }
update
workspace/glaf-core/src/main/java/com/glaf/core/config/DataSourceConfig.java
update
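The updated reload() in the new DataSourceConfig infers databaseType from the JDBC URL prefix before falling back to the explicit type property. A condensed sketch of that prefix mapping (the sample URL is illustrative):

import org.apache.commons.lang.StringUtils;

final class JdbcUrlTypeSketch {
    /** Mirrors the prefix checks in DataSourceConfig.reload(); returns null when nothing matches. */
    static String databaseTypeFromUrl(String jdbcUrl) {
        if (StringUtils.contains(jdbcUrl, "jdbc:h2:")) return "h2";
        if (StringUtils.contains(jdbcUrl, "jdbc:oracle:")) return "oracle";
        if (StringUtils.contains(jdbcUrl, "jdbc:postgresql:")) return "postgresql";
        if (StringUtils.contains(jdbcUrl, "jdbc:db2:")) return "db2";
        if (StringUtils.contains(jdbcUrl, "jdbc:sqlserver:")) return "sqlserver";
        if (StringUtils.contains(jdbcUrl, "jdbc:mysql:")) return "mysql";
        return null;
    }

    public static void main(String[] args) {
        System.out.println(databaseTypeFromUrl("jdbc:mysql://localhost:3306/glaf")); // mysql
    }
}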
Java
apache-2.0
4f205e1c9712b0d1785ffa1e01e2ac1305044d92
0
andstatus/andstatus,andstatus/andstatus,andstatus/andstatus
/* * Copyright (c) 2016 yvolk (Yuri Volkov), http://yurivolkov.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.andstatus.app.net.http; import com.github.scribejava.core.builder.ServiceBuilder; import com.github.scribejava.core.exceptions.OAuthException; import com.github.scribejava.core.httpclient.jdk.JDKHttpClientConfig; import com.github.scribejava.core.model.OAuth2AccessToken; import com.github.scribejava.core.model.OAuthConstants; import com.github.scribejava.core.model.OAuthRequest; import com.github.scribejava.core.model.Response; import com.github.scribejava.core.model.Verb; import com.github.scribejava.core.oauth.OAuth20Service; import org.andstatus.app.context.MyPreferences; import org.andstatus.app.util.FileUtils; import org.andstatus.app.util.MyLog; import org.json.JSONException; import org.json.JSONObject; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.HttpURLConnection; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ExecutionException; import cz.msebera.android.httpclient.HttpEntity; import oauth.signpost.OAuthConsumer; import oauth.signpost.OAuthProvider; /** * @author [email protected] */ public class HttpConnectionOAuth2JavaNet extends HttpConnectionOAuthJavaNet { public static final String OAUTH_SCOPES = "read write follow"; @Override public void registerClient(String path) throws ConnectionException { String logmsg = "registerClient; for " + data.originUrl + "; URL='" + pathToUrlString(path) + "'"; MyLog.v(this, logmsg); data.oauthClientKeys.clear(); try { JSONObject params = new JSONObject(); params.put("client_name", HttpConnection.USER_AGENT); params.put("redirect_uris", HttpConnection.CALLBACK_URI.toString()); params.put("scopes", OAUTH_SCOPES); params.put("website", "http://andstatus.org"); JSONObject jso = postRequest(path, params); String consumerKey = jso.getString("client_id"); String consumerSecret = jso.getString("client_secret"); data.oauthClientKeys.setConsumerKeyAndSecret(consumerKey, consumerSecret); } catch (IOException e) { MyLog.i(this, logmsg, e); } catch (JSONException e) { MyLog.i(this, logmsg, e); } if (data.oauthClientKeys.areKeysPresent()) { MyLog.v(this, "Completed " + logmsg); } else { throw ConnectionException.fromStatusCodeAndHost(ConnectionException.StatusCode.NO_CREDENTIALS_FOR_HOST, "No client keys for the host yet; " + logmsg, data.originUrl); } } @Override protected void postRequest(HttpReadResult result) throws ConnectionException { if (data.areOAuthClientKeysPresent()) { postRequestOauth(result); } else { super.postRequest(result); } } private void postRequestOauth(HttpReadResult result) throws ConnectionException { try { OAuth20Service service = getService(false); final OAuthRequest request = new OAuthRequest(Verb.POST, result.getUrlObj().toString()); if (result.getFormParams().has(HttpConnection.KEY_MEDIA_PART_URI)) { HttpEntity httpEntity = HttpConnectionApacheCommon.multiPartFormEntity(result.getFormParams()); request.addHeader(httpEntity.getContentType().getName(), 
httpEntity.getContentType().getValue()); request.setPayload(httpEntityToBytes(httpEntity)); } else { Iterator<String> iterator = result.getFormParams().keys(); while (iterator.hasNext()) { String key = iterator.next(); request.addBodyParameter(key, result.getFormParams().optString(key)); } } signRequest(request, service, false); final Response response = service.execute(request); result.setStatusCode(response.getCode()); switch(result.getStatusCode()) { case OK: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); break; default: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); throw result.getExceptionFromJsonErrorResponse(); } } catch (IOException | ExecutionException | OAuthException e) { result.e1 = e; } catch (InterruptedException e) { Thread.currentThread().interrupt(); result.e1 = e; } } byte[] httpEntityToBytes(HttpEntity httpEntity) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); httpEntity.writeTo(out); out.flush(); return out.toByteArray(); } @Override protected void getRequest(HttpReadResult result) throws ConnectionException { String method = "getRequest; "; StringBuilder logBuilder = new StringBuilder(method); try { logBuilder.append("URL='" + result.getUrl() + "';"); OAuth20Service service = getService(false); OAuthRequest request; boolean redirected = false; boolean stop = false; do { request = new OAuthRequest(Verb.GET, result.getUrlObj().toString()); if (result.authenticate) { signRequest(request, service, redirected); } Response response = service.execute(request); result.setStatusCode(response.getCode()); switch(result.getStatusCode()) { case OK: if (result.fileResult != null) { FileUtils.readStreamToFile(response.getStream(), result.fileResult); } else { result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); } stop = true; break; case MOVED: redirected = true; result.setUrl(response.getHeader("Location").replace("%3F", "?")); String logMsg3 = (result.redirected ? "Following redirect to " : "Not redirected to ") + "'" + result.getUrl() + "'"; logBuilder.append(logMsg3 + "; "); MyLog.v(this, method + logMsg3); if (MyLog.isVerboseEnabled()) { StringBuilder message = new StringBuilder(method + "Headers: "); for (Map.Entry<String, String> entry : response.getHeaders().entrySet()) { message.append(entry.getKey() +": " + entry.getValue() + ";\n"); } MyLog.v(this, message.toString()); } // TODO: ?! 
...disconnect(); break; default: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); stop = result.fileResult == null || !result.authenticate; if (!stop) { result.authenticate = false; String logMsg4 = "Retrying without authentication connection to '" + result.getUrl() + "'"; logBuilder.append(logMsg4 + "; "); MyLog.v(this, method + logMsg4); } break; } } while (!stop); } catch(ConnectionException e) { throw e; } catch(IOException | ExecutionException | OAuthException e) { throw new ConnectionException(logBuilder.toString(), e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new ConnectionException(logBuilder.toString(), e); } } @Override public OAuth20Service getService(boolean redirect) { final JDKHttpClientConfig clientConfig = JDKHttpClientConfig.defaultConfig(); clientConfig.setConnectTimeout(MyPreferences.getConnectionTimeoutMs()); clientConfig.setReadTimeout(2*MyPreferences.getConnectionTimeoutMs()); clientConfig.setFollowRedirects(false); final ServiceBuilder serviceBuilder = new ServiceBuilder() .apiKey(data.oauthClientKeys.getConsumerKey()) .apiSecret(data.oauthClientKeys.getConsumerSecret()) .httpClientConfig(clientConfig); if (redirect) { serviceBuilder.callback(HttpConnection.CALLBACK_URI.toString()); } return serviceBuilder.build(new OAuthApi20(this)); } private void signRequest(OAuthRequest request, OAuth20Service service, boolean redirected) throws ConnectionException { if (!getCredentialsPresent()) { return; } try { if (data.originUrl.getHost().contentEquals(data.urlForUserToken.getHost())) { OAuth2AccessToken token = new OAuth2AccessToken(getUserToken(), getUserSecret()); service.signRequest(token, request); } else { // See http://tools.ietf.org/html/draft-prodromou-dialback-00 if (redirected) { OAuth2AccessToken token = new OAuth2AccessToken("", null); service.signRequest(token, request); } else { request.addParameter("Authorization", "Dialback"); request.addParameter("host", data.urlForUserToken.getHost()); request.addParameter("token", getUserToken()); MyLog.v(this, "Dialback authorization at " + data.originUrl + "; urlForUserToken=" + data.urlForUserToken + "; token=" + getUserToken()); OAuth2AccessToken token = new OAuth2AccessToken(getUserToken(), null); service.signRequest(token, request); } } } catch (Exception e) { throw new ConnectionException(e); } } @Override protected void signConnection(HttpURLConnection conn, OAuthConsumer consumer, boolean redirected) throws ConnectionException { if (!getCredentialsPresent()) { return; } try { OAuth2AccessToken token; if (data.originUrl.getHost().contentEquals(data.urlForUserToken.getHost())) { token = new OAuth2AccessToken(getUserToken(), getUserSecret()); } else { if (redirected) { token = new OAuth2AccessToken("", null); } else { conn.setRequestProperty("Authorization", "Dialback"); conn.setRequestProperty("host", data.urlForUserToken.getHost()); conn.setRequestProperty("token", getUserToken()); MyLog.v(this, "Dialback authorization at " + data.originUrl + "; urlForUserToken=" + data.urlForUserToken + "; token=" + getUserToken()); token = new OAuth2AccessToken(getUserToken(), null); } } conn.setRequestProperty(OAuthConstants.ACCESS_TOKEN, token.getAccessToken()); } catch (Exception e) { throw new ConnectionException(e); } } @Override public OAuthConsumer getConsumer() { return null; } @Override public OAuthProvider getProvider() throws ConnectionException { return null; } @Override public boolean isOAuth2() { return true; } }
app/src/main/java/org/andstatus/app/net/http/HttpConnectionOAuth2JavaNet.java
/* * Copyright (c) 2016 yvolk (Yuri Volkov), http://yurivolkov.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.andstatus.app.net.http; import com.github.scribejava.core.builder.ServiceBuilder; import com.github.scribejava.core.exceptions.OAuthException; import com.github.scribejava.core.httpclient.jdk.JDKHttpClientConfig; import com.github.scribejava.core.model.OAuth2AccessToken; import com.github.scribejava.core.model.OAuthConstants; import com.github.scribejava.core.model.OAuthRequest; import com.github.scribejava.core.model.Response; import com.github.scribejava.core.model.Verb; import com.github.scribejava.core.oauth.OAuth20Service; import org.andstatus.app.context.MyPreferences; import org.andstatus.app.util.FileUtils; import org.andstatus.app.util.MyLog; import org.json.JSONException; import org.json.JSONObject; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.HttpURLConnection; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ExecutionException; import cz.msebera.android.httpclient.HttpEntity; import oauth.signpost.OAuthConsumer; import oauth.signpost.OAuthProvider; /** * @author [email protected] */ public class HttpConnectionOAuth2JavaNet extends HttpConnectionOAuthJavaNet { public static final String OAUTH_SCOPES = "read write follow"; @Override public void registerClient(String path) throws ConnectionException { String logmsg = "registerClient; for " + data.originUrl + "; URL='" + pathToUrlString(path) + "'"; MyLog.v(this, logmsg); data.oauthClientKeys.clear(); try { JSONObject params = new JSONObject(); params.put("client_name", HttpConnection.USER_AGENT); params.put("redirect_uris", HttpConnection.CALLBACK_URI.toString()); params.put("scopes", OAUTH_SCOPES); params.put("website", "http://andstatus.org"); JSONObject jso = postRequest(path, params); String consumerKey = jso.getString("client_id"); String consumerSecret = jso.getString("client_secret"); data.oauthClientKeys.setConsumerKeyAndSecret(consumerKey, consumerSecret); } catch (IOException e) { MyLog.i(this, logmsg, e); } catch (JSONException e) { MyLog.i(this, logmsg, e); } if (data.oauthClientKeys.areKeysPresent()) { MyLog.v(this, "Completed " + logmsg); } else { throw ConnectionException.fromStatusCodeAndHost(ConnectionException.StatusCode.NO_CREDENTIALS_FOR_HOST, "No client keys for the host yet; " + logmsg, data.originUrl); } } @Override protected void postRequest(HttpReadResult result) throws ConnectionException { if (data.areOAuthClientKeysPresent()) { postRequestOauth(result); } else { super.postRequest(result); } } private void postRequestOauth(HttpReadResult result) throws ConnectionException { try { OAuth20Service service = getService(false); final OAuthRequest request = new OAuthRequest(Verb.POST, result.getUrlObj().toString()); if (result.getFormParams().has(HttpConnection.KEY_MEDIA_PART_URI)) { HttpEntity httpEntity = HttpConnectionApacheCommon.multiPartFormEntity(result.getFormParams()); request.addHeader(httpEntity.getContentType().getName(), 
httpEntity.getContentType().getValue()); request.setPayload(httpEntityToBytes(httpEntity)); } else { Iterator<String> iterator = result.getFormParams().keys(); while (iterator.hasNext()) { String key = iterator.next(); request.addBodyParameter(key, result.getFormParams().optString(key)); } } signRequest(request, service, false); final Response response = service.execute(request); result.setStatusCode(response.getCode()); switch(result.getStatusCode()) { case OK: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); break; default: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); throw result.getExceptionFromJsonErrorResponse(); } } catch (IOException | ExecutionException e) { result.e1 = e; } catch (InterruptedException e) { Thread.currentThread().interrupt(); result.e1 = e; } } byte[] httpEntityToBytes(HttpEntity httpEntity) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); httpEntity.writeTo(out); out.flush(); return out.toByteArray(); } @Override protected void getRequest(HttpReadResult result) throws ConnectionException { String method = "getRequest; "; StringBuilder logBuilder = new StringBuilder(method); try { logBuilder.append("URL='" + result.getUrl() + "';"); OAuth20Service service = getService(false); OAuthRequest request; boolean redirected = false; boolean stop = false; do { request = new OAuthRequest(Verb.GET, result.getUrlObj().toString()); if (result.authenticate) { signRequest(request, service, redirected); } Response response = service.execute(request); result.setStatusCode(response.getCode()); switch(result.getStatusCode()) { case OK: if (result.fileResult != null) { FileUtils.readStreamToFile(response.getStream(), result.fileResult); } else { result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); } stop = true; break; case MOVED: redirected = true; result.setUrl(response.getHeader("Location").replace("%3F", "?")); String logMsg3 = (result.redirected ? "Following redirect to " : "Not redirected to ") + "'" + result.getUrl() + "'"; logBuilder.append(logMsg3 + "; "); MyLog.v(this, method + logMsg3); if (MyLog.isVerboseEnabled()) { StringBuilder message = new StringBuilder(method + "Headers: "); for (Map.Entry<String, String> entry : response.getHeaders().entrySet()) { message.append(entry.getKey() +": " + entry.getValue() + ";\n"); } MyLog.v(this, message.toString()); } // TODO: ?! 
...disconnect(); break; default: result.strResponse = HttpConnectionUtils.readStreamToString(response.getStream()); stop = result.fileResult == null || !result.authenticate; if (!stop) { result.authenticate = false; String logMsg4 = "Retrying without authentication connection to '" + result.getUrl() + "'"; logBuilder.append(logMsg4 + "; "); MyLog.v(this, method + logMsg4); } break; } } while (!stop); } catch(ConnectionException e) { throw e; } catch(IOException | ExecutionException | OAuthException e) { throw new ConnectionException(logBuilder.toString(), e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new ConnectionException(logBuilder.toString(), e); } } @Override public OAuth20Service getService(boolean redirect) { final JDKHttpClientConfig clientConfig = JDKHttpClientConfig.defaultConfig(); clientConfig.setConnectTimeout(MyPreferences.getConnectionTimeoutMs()); clientConfig.setReadTimeout(2*MyPreferences.getConnectionTimeoutMs()); clientConfig.setFollowRedirects(false); final ServiceBuilder serviceBuilder = new ServiceBuilder() .apiKey(data.oauthClientKeys.getConsumerKey()) .apiSecret(data.oauthClientKeys.getConsumerSecret()) .httpClientConfig(clientConfig); if (redirect) { serviceBuilder.callback(HttpConnection.CALLBACK_URI.toString()); } return serviceBuilder.build(new OAuthApi20(this)); } private void signRequest(OAuthRequest request, OAuth20Service service, boolean redirected) throws ConnectionException { if (!getCredentialsPresent()) { return; } try { if (data.originUrl.getHost().contentEquals(data.urlForUserToken.getHost())) { OAuth2AccessToken token = new OAuth2AccessToken(getUserToken(), getUserSecret()); service.signRequest(token, request); } else { // See http://tools.ietf.org/html/draft-prodromou-dialback-00 if (redirected) { OAuth2AccessToken token = new OAuth2AccessToken("", null); service.signRequest(token, request); } else { request.addParameter("Authorization", "Dialback"); request.addParameter("host", data.urlForUserToken.getHost()); request.addParameter("token", getUserToken()); MyLog.v(this, "Dialback authorization at " + data.originUrl + "; urlForUserToken=" + data.urlForUserToken + "; token=" + getUserToken()); OAuth2AccessToken token = new OAuth2AccessToken(getUserToken(), null); service.signRequest(token, request); } } } catch (Exception e) { throw new ConnectionException(e); } } @Override protected void signConnection(HttpURLConnection conn, OAuthConsumer consumer, boolean redirected) throws ConnectionException { if (!getCredentialsPresent()) { return; } try { OAuth2AccessToken token; if (data.originUrl.getHost().contentEquals(data.urlForUserToken.getHost())) { token = new OAuth2AccessToken(getUserToken(), getUserSecret()); } else { if (redirected) { token = new OAuth2AccessToken("", null); } else { conn.setRequestProperty("Authorization", "Dialback"); conn.setRequestProperty("host", data.urlForUserToken.getHost()); conn.setRequestProperty("token", getUserToken()); MyLog.v(this, "Dialback authorization at " + data.originUrl + "; urlForUserToken=" + data.urlForUserToken + "; token=" + getUserToken()); token = new OAuth2AccessToken(getUserToken(), null); } } conn.setRequestProperty(OAuthConstants.ACCESS_TOKEN, token.getAccessToken()); } catch (Exception e) { throw new ConnectionException(e); } } @Override public OAuthConsumer getConsumer() { return null; } @Override public OAuthProvider getProvider() throws ConnectionException { return null; } @Override public boolean isOAuth2() { return true; } }
Fixed crash on com.github.scribejava.core.exceptions.OAuthException
app/src/main/java/org/andstatus/app/net/http/HttpConnectionOAuth2JavaNet.java
Fixed crash on com.github.scribejava.core.exceptions.OAuthException
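The getRequest loop in the record above disables automatic redirect following so it can decide, per hop, whether to re-sign or strip authentication before retrying. Purely as an illustration of that manual-redirect pattern (JDK only, not AndStatus code; the class name and hop limit are assumptions), a minimal sketch:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

/** Illustrative only: follow 301/302 redirects by hand, as the record's GET loop does. */
public final class RedirectFollowSketch {

    public static String get(String urlString) throws Exception {
        String current = urlString;
        for (int hop = 0; hop < 5; hop++) {                     // illustrative hop limit
            HttpURLConnection conn = (HttpURLConnection) new URL(current).openConnection();
            conn.setInstanceFollowRedirects(false);             // handle redirects manually
            int code = conn.getResponseCode();
            if (code == HttpURLConnection.HTTP_MOVED_PERM
                    || code == HttpURLConnection.HTTP_MOVED_TEMP) {
                current = conn.getHeaderField("Location");      // assumes an absolute Location header
                conn.disconnect();
                continue;                                       // a real client would decide here whether to re-sign
            }
            try (InputStream in = conn.getInputStream()) {
                return new String(in.readAllBytes(), StandardCharsets.UTF_8);
            } finally {
                conn.disconnect();
            }
        }
        throw new IllegalStateException("Too many redirects: " + urlString);
    }
}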
Java
apache-2.0
98954e8ff3c0e7de50cb4f425ad558e8670065db
0
GwtMaterialDesign/gwt-material,GwtMaterialDesign/gwt-material,guibertjulien/gwt-material,GwtMaterialDesign/gwt-material,marcrh/gwt-material,marcrh/gwt-material,marcrh/gwt-material,guibertjulien/gwt-material,guibertjulien/gwt-material
/* * #%L * GwtMaterial * %% * Copyright (C) 2015 - 2016 GwtMaterialDesign * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package gwt.material.design.client.ui; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.OptionElement; import com.google.gwt.dom.client.SelectElement; import com.google.gwt.event.logical.shared.ValueChangeEvent; import com.google.gwt.i18n.client.HasDirection.Direction; import com.google.gwt.user.client.ui.FormPanel; import com.google.gwt.user.client.ui.HasConstrainedValue; import com.google.gwt.user.client.ui.ListBox; import gwt.material.design.client.base.*; import gwt.material.design.client.base.mixin.ToggleStyleMixin; import gwt.material.design.client.ui.html.Label; import gwt.material.design.client.ui.html.Option; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import static gwt.material.design.client.js.JsMaterialElement.$; //@formatter:off /** * <p>Material ListBox is another dropdown component that will set / get the value depends on the selected index * <h3>UiBinder Usage:</h3> * <p> * <pre> * {@code * <m:MaterialListBox ui:field="lstBox" /> * } * </pre> * <h3>Java Usage:</h3> * <p> * <pre> * {@code * // functions * lstBox.setSelectedIndex(2); * lstBox.getSelectedIndex(); * lstBox.addValueChangeHandler(handler); * } * </pre> * </p> * * @author kevzlou7979 * @author Ben Dol * @see <a href="http://gwtmaterialdesign.github.io/gwt-material-demo/#!forms">Material ListBox</a> */ //@formatter:on public class MaterialListValueBox<T> extends AbstractValueWidget<T> implements HasPlaceholder, HasConstrainedValue<T> { private final ListBox listBox = new ListBox(); private final Label lblName = new Label(); private boolean initialized; // By default the key is generated using toString private KeyFactory<T, String> keyFactory = Object::toString; protected final List<T> values = new ArrayList<>(); private ToggleStyleMixin<ListBox> toggleOldMixin; public MaterialListValueBox() { super(Document.get().createDivElement(), "input-field"); add(listBox); add(lblName); toggleOldMixin = new ToggleStyleMixin<>(listBox, "browser-default"); } @Override protected void onLoad() { super.onLoad(); $(listBox.getElement()).off("change"); $(listBox.getElement()).change((e, param) -> { try { ValueChangeEvent.fire(this, getValue()); } catch (IndexOutOfBoundsException ex) { GWT.log("ListBox value change handler threw an exception.", ex); } return true; }); initializeMaterial(); } @Override protected void onUnload() { super.onUnload(); MaterialToast.fireToast("UnLoaded"); $(listBox.getElement()).material_select("destroy"); } @Override public void setPlaceholder(String placeholder) { lblName.setText(placeholder); if (initialized && placeholder != null) { initializeMaterial(); } } @Override public String getPlaceholder() { return lblName.getText(); } public OptionElement getOptionElement(int index) { return getSelectElement().getOptions().getItem(index); } /** * Removes all items 
from the list box. */ @Override public void clear() { values.clear(); listBox.clear(); if (initialized) { // reinitialize initializeMaterial(); } } protected SelectElement getSelectElement() { return listBox.getElement().cast(); } /** * Initializes the Materialize CSS list box. Should be * called every time the contents of the list box * changes, to keep the Materialize CSS design updated. */ protected void initializeMaterial() { $(listBox.getElement()).material_select(); initialized = true; } /** * Sets whether this list allows multiple selections. * * @param multipleSelect <code>true</code> to allow multiple selections */ public void setMultipleSelect(boolean multipleSelect) { listBox.setMultipleSelect(multipleSelect); if (initialized) { initializeMaterial(); } } /** * Gets whether this list allows multiple selection. * * @return <code>true</code> if multiple selection is allowed */ public boolean isMultipleSelect() { return listBox.isMultipleSelect(); } public void setEmptyPlaceHolder(String value) { listBox.insertItem(value, 0); getOptionElement(0).setDisabled(true); if (initialized) { initializeMaterial(); } } @Override public void setAcceptableValues(Collection<T> values) { this.values.clear(); clear(); for (T value : values) { addItem(value); } } @Override public T getValue() { if (getSelectedIndex() != -1) { return values.get(getSelectedIndex()); } return null; } @Override public void setValue(T value) { setValue(value, true); } @Override public void setValue(T value, boolean fireEvents) { int index = values.indexOf(value); if (index > 0) { T before = getValue(); setSelectedIndex(index); if (fireEvents) { ValueChangeEvent.fireIfNotEqual(this, before, value); } } } public boolean isOld() { return toggleOldMixin.isOn(); } public void setOld(boolean old) { toggleOldMixin.setOn(old); } // delegate methods public void add(T value) { addItem(value); } /** * Inserts an item into the list box, specifying its direction and an * initial value for the item. If the index is less than zero, or greater * than or equal to the length of the list, then the item will be appended * to the end of the list. * * @param item the text of the item to be inserted * @param dir the item's direction. If {@code null}, the item is displayed * in the widget's overall direction, or, if a direction * estimator has been set, in the item's estimated direction. * @param value the item's value, to be submitted if it is part of a * {@link FormPanel}. * @param index the index at which to insert it */ public void insertItem(T item, Direction dir, String value, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), dir, value, index); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets the value associated with the item at a given index. This value can * be used for any purpose, but is also what is passed to the server when * the list box is submitted as part of a {@link FormPanel}. * * @param index the index of the item to be set * @param value the item's new value; cannot be <code>null</code> * @throws IndexOutOfBoundsException if the index is out of range */ public void setValue(int index, String value) { listBox.setValue(index, value); if (initialized) { // reinitialize initializeMaterial(); } } @Override public void setTitle(String title) { listBox.setTitle(title); if (initialized) { // reinitialize initializeMaterial(); } } /** * Adds an item to the list box, specifying its direction. 
This method has * the same effect as * <p> * <pre> * addItem(item, dir, item) * </pre> * * @param item the text of the item to be added * @param dir the item's direction */ public void addItem(T item, Direction dir) { values.add(item); listBox.addItem(keyFactory.generateKey(item), dir); if (initialized) { // reinitialize initializeMaterial(); } } /** * Adds an item to the list box. This method has the same effect as * <p> * <pre> * addItem(item, item) * </pre> * * @param item the text of the item to be added */ public void addItem(T item) { values.add(item); listBox.addItem(keyFactory.generateKey(item)); if (initialized) { // reinitialize initializeMaterial(); } } /** * Adds an item to the list box, specifying an initial value for the item. * * @param item the text of the item to be added * @param value the item's value, to be submitted if it is part of a * {@link FormPanel}; cannot be <code>null</code> */ public void addItem(T item, String value) { values.add(item); listBox.addItem(keyFactory.generateKey(item), value); if (initialized) { // reinitialize initializeMaterial(); } } /** * Adds an item to the list box, specifying its direction and an initial * value for the item. * * @param item the text of the item to be added * @param dir the item's direction * @param value the item's value, to be submitted if it is part of a * {@link FormPanel}; cannot be <code>null</code> */ public void addItem(T item, Direction dir, String value) { values.add(item); listBox.addItem(keyFactory.generateKey(item), dir, value); if (initialized) { // reinitialize initializeMaterial(); } } /** * Inserts an item into the list box. Has the same effect as * <p> * <pre> * insertItem(item, item, index) * </pre> * * @param item the text of the item to be inserted * @param index the index at which to insert it */ public void insertItem(T item, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), index); if (initialized) { // reinitialize initializeMaterial(); } } /** * Inserts an item into the list box, specifying its direction. Has the same * effect as * <p> * <pre> * insertItem(item, dir, item, index) * </pre> * * @param item the text of the item to be inserted * @param dir the item's direction * @param index the index at which to insert it */ public void insertItem(T item, Direction dir, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), dir, index); if (initialized) { // reinitialize initializeMaterial(); } } /** * Inserts an item into the list box, specifying an initial value for the * item. Has the same effect as * <p> * <pre> * insertItem(item, null, value, index) * </pre> * * @param item the text of the item to be inserted * @param value the item's value, to be submitted if it is part of a * {@link FormPanel}. * @param index the index at which to insert it */ public void insertItem(T item, String value, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), value, index); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets whether an individual list item is selected. * * @param index the index of the item to be selected or unselected * @param selected <code>true</code> to select the item * @throws IndexOutOfBoundsException if the index is out of range */ public void setItemSelected(int index, boolean selected) { listBox.setItemSelected(index, selected); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets the text associated with the item at a given index. 
* * @param index the index of the item to be set * @param text the item's new text * @throws IndexOutOfBoundsException if the index is out of range */ public void setItemText(int index, String text) { listBox.setItemText(index, text); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets the text associated with the item at a given index. * * @param index the index of the item to be set * @param text the item's new text * @param dir the item's direction. * @throws IndexOutOfBoundsException if the index is out of range */ public void setItemText(int index, String text, Direction dir) { listBox.setItemText(index, text, dir); if (initialized) { // reinitialize initializeMaterial(); } } public void setName(String name) { listBox.setName(name); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets the currently selected index. * <p> * After calling this method, only the specified item in the list will * remain selected. For a ListBox with multiple selection enabled, see * {@link #setItemSelected(int, boolean)} to select multiple items at a * time. * * @param index the index of the item to be selected */ public void setSelectedIndex(int index) { listBox.setSelectedIndex(index); if (initialized) { // reinitialize initializeMaterial(); } } /** * Sets the number of items that are visible. If only one item is visible, * then the box will be displayed as a drop-down list. * * @param visibleItems the visible item count */ public void setVisibleItemCount(int visibleItems) { listBox.setVisibleItemCount(visibleItems); if (initialized) { // reinitialize initializeMaterial(); } } /** * Gets the number of items present in the list box. * * @return the number of items */ public int getItemCount() { return listBox.getItemCount(); } /** * Gets the text associated with the item at the specified index. * * @param index the index of the item whose text is to be retrieved * @return the text associated with the item * @throws IndexOutOfBoundsException if the index is out of range */ public String getItemText(int index) { return listBox.getItemText(index); } /** * Gets the text for currently selected item. If multiple items are * selected, this method will return the text of the first selected item. * * @return the text for selected item, or {@code null} if none is selected */ public String getSelectedItemText() { return listBox.getSelectedItemText(); } public String getName() { return listBox.getName(); } /** * Gets the currently-selected item. If multiple items are selected, this * method will return the first selected item ({@link #isItemSelected(int)} * can be used to query individual items). * * @return the selected index, or <code>-1</code> if none is selected */ public int getSelectedIndex() { return listBox.getSelectedIndex(); } /** * Gets the value associated with the item at a given index. * * @param index the index of the item to be retrieved * @return the item's associated value * @throws IndexOutOfBoundsException if the index is out of range */ public T getValue(int index) { return values.get(index); } /** * Gets the value for currently selected item. If multiple items are * selected, this method will return the value of the first selected item. * * @return the value for selected item, or {@code null} if none is selected */ public T getSelectedValue() { try { return values.get(getSelectedIndex()); } catch (IndexOutOfBoundsException ex) { return null; } } /** * Gets the number of items that are visible. 
If only one item is visible, * then the box will be displayed as a drop-down list. * * @return the visible item count */ public int getVisibleItemCount() { return listBox.getVisibleItemCount(); } /** * Determines whether an individual list item is selected. * * @param index the index of the item to be tested * @return <code>true</code> if the item is selected * @throws IndexOutOfBoundsException if the index is out of range */ public boolean isItemSelected(int index) { return listBox.isItemSelected(index); } /** * Removes the item at the specified index. * * @param index the index of the item to be removed * @throws IndexOutOfBoundsException if the index is out of range */ public void removeItem(int index) { values.remove(index); listBox.removeItem(index); if (initialized) { initializeMaterial(); } } // utility methods /** * Returns all selected values of the list box, or empty array if none. * * @return the selected values of the list box */ public String[] getItemsSelected() { List<String> selected = new LinkedList<>(); for (int i = 0; i < listBox.getItemCount(); i++) { if (listBox.isItemSelected(i)) { selected.add(listBox.getValue(i)); } } return selected.toArray(new String[selected.size()]); } /** * Sets the currently selected value. * <p> * After calling this method, only the specified item in the list will * remain selected. For a ListBox with multiple selection enabled, see * {@link #setValueSelected(String, boolean)} to select multiple items at a * time. * * @param value the value of the item to be selected */ public void setSelectedValue(String value) { int idx = getIndex(value); if (idx >= 0) { setSelectedIndex(idx); } } /** * Gets the index of the specified value. * * @param value the value of the item to be found * @return the index of the value */ public int getIndex(String value) { int count = getItemCount(); for (int i = 0; i < count; i++) { if (getValue(i).equals(value)) { return i; } } return -1; } /** * Sets whether an individual list value is selected. * * @param value the value of the item to be selected or unselected * @param selected <code>true</code> to select the item */ public void setValueSelected(String value, boolean selected) { int idx = getIndex(value); if (idx >= 0) { setItemSelected(idx, selected); } } /** * Removes a value from the list box. Nothing is done if the value isn't on * the list box. * * @param value the value to be removed from the list */ public void removeValue(String value) { int idx = getIndex(value); if (idx >= 0) { removeItem(idx); } } @Override public void setEnabled(boolean enabled) { listBox.setEnabled(enabled); if (initialized) { // reinitialize initializeMaterial(); } } /** * Use your own key factory for value keys. */ public void setKeyFactory(KeyFactory<T, String> keyFactory) { this.keyFactory = keyFactory; } }
gwt-material/src/main/java/gwt/material/design/client/ui/MaterialListValueBox.java
/* * #%L * GwtMaterial * %% * Copyright (C) 2015 - 2016 GwtMaterialDesign * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package gwt.material.design.client.ui; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Document; import com.google.gwt.dom.client.OptionElement; import com.google.gwt.dom.client.SelectElement; import com.google.gwt.event.logical.shared.ValueChangeEvent; import com.google.gwt.i18n.client.HasDirection.Direction; import com.google.gwt.user.client.ui.FormPanel; import com.google.gwt.user.client.ui.HasConstrainedValue; import com.google.gwt.user.client.ui.ListBox; import gwt.material.design.client.base.AbstractValueWidget; import gwt.material.design.client.base.HasPlaceholder; import gwt.material.design.client.base.KeyFactory; import gwt.material.design.client.base.mixin.ToggleStyleMixin; import gwt.material.design.client.ui.html.Label; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import static gwt.material.design.client.js.JsMaterialElement.$; //@formatter:off /** * <p>Material ListBox is another dropdown component that will set / get the value depends on the selected index * <h3>UiBinder Usage:</h3> * * <pre> * {@code * <m:MaterialListBox ui:field="lstBox" /> * } * </pre> * <h3>Java Usage:</h3> * * <pre> * {@code * // functions * lstBox.setSelectedIndex(2); * lstBox.getSelectedIndex(); * lstBox.addValueChangeHandler(handler); * } * </pre> * </p> * * @author kevzlou7979 * @author Ben Dol * @see <a href="http://gwtmaterialdesign.github.io/gwt-material-demo/#!forms">Material ListBox</a> */ //@formatter:on public class MaterialListValueBox<T> extends AbstractValueWidget<T> implements HasPlaceholder, HasConstrainedValue<T> { private final ListBox listBox = new ListBox(); private final Label lblName = new Label(); // By default the key is generated using toString private KeyFactory<T, String> keyFactory = Object::toString; protected final List<T> values = new ArrayList<>(); private ToggleStyleMixin<ListBox> toggleOldMixin; public MaterialListValueBox() { super(Document.get().createDivElement(), "input-field"); add(listBox); add(lblName); toggleOldMixin = new ToggleStyleMixin<>(listBox, "browser-default"); } @Override protected void onLoad() { super.onLoad(); $(listBox.getElement()).change((e, param) -> { try { ValueChangeEvent.fire(this, getValue()); } catch (IndexOutOfBoundsException ex) { GWT.log("ListBox value change handler threw an exception.", ex); } return true; }); initializeMaterial(); } @Override protected void onUnload() { super.onUnload(); $(listBox.getElement()).material_select("destroy"); } @Override public void setPlaceholder(String placeholder) { lblName.setText(placeholder); } @Override public String getPlaceholder() { return lblName.getText(); } public OptionElement getOptionElement(int index) { return getSelectElement().getOptions().getItem(index); } /** * Removes all items from the list box. 
*/ @Override public void clear() { values.clear(); listBox.clear(); } protected SelectElement getSelectElement() { return listBox.getElement().cast(); } /** * Initializes the Materialize CSS list box. Should be * called every time the contents of the list box * changes, to keep the Materialize CSS design updated. */ protected void initializeMaterial() { $(listBox.getElement()).material_select(); } /** * Sets whether this list allows multiple selections. * * @param multipleSelect <code>true</code> to allow multiple selections */ public void setMultipleSelect(boolean multipleSelect) { listBox.setMultipleSelect(multipleSelect); } /** * Gets whether this list allows multiple selection. * * @return <code>true</code> if multiple selection is allowed */ public boolean isMultipleSelect() { return listBox.isMultipleSelect(); } public void setEmptyPlaceHolder(String value) { listBox.insertItem(value, 0); getOptionElement(0).setDisabled(true); } @Override public void setAcceptableValues(Collection<T> values) { this.values.clear(); clear(); for(T value : values) { addItem(value); } } @Override public T getValue() { if(getSelectedIndex() != -1) { return values.get(getSelectedIndex()); } return null; } @Override public void setValue(T value) { setValue(value, true); } @Override public void setValue(T value, boolean fireEvents) { int index = values.indexOf(value); if(index > 0) { T before = getValue(); setSelectedIndex(index); if (fireEvents) { ValueChangeEvent.fireIfNotEqual(this, before, value); } } } public boolean isOld() { return toggleOldMixin.isOn(); } public void setOld(boolean old) { toggleOldMixin.setOn(old); } // delegate methods public void add(T value) { addItem(value); } /** * Inserts an item into the list box, specifying its direction and an * initial value for the item. If the index is less than zero, or greater * than or equal to the length of the list, then the item will be appended * to the end of the list. * * @param item * the text of the item to be inserted * @param dir * the item's direction. If {@code null}, the item is displayed * in the widget's overall direction, or, if a direction * estimator has been set, in the item's estimated direction. * @param value * the item's value, to be submitted if it is part of a * {@link FormPanel}. * @param index * the index at which to insert it */ public void insertItem(T item, Direction dir, String value, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), dir, value, index); } /** * Sets the value associated with the item at a given index. This value can * be used for any purpose, but is also what is passed to the server when * the list box is submitted as part of a {@link FormPanel}. * * @param index * the index of the item to be set * @param value * the item's new value; cannot be <code>null</code> * @throws IndexOutOfBoundsException * if the index is out of range */ public void setValue(int index, String value) { listBox.setValue(index, value); } @Override public void setTitle(String title) { listBox.setTitle(title); } /** * Adds an item to the list box, specifying its direction. This method has * the same effect as * * <pre> * addItem(item, dir, item) * </pre> * * @param item * the text of the item to be added * @param dir * the item's direction */ public void addItem(T item, Direction dir) { values.add(item); listBox.addItem(keyFactory.generateKey(item), dir); } /** * Adds an item to the list box. 
This method has the same effect as * * <pre> * addItem(item, item) * </pre> * * @param item * the text of the item to be added */ public void addItem(T item) { values.add(item); listBox.addItem(keyFactory.generateKey(item)); } /** * Adds an item to the list box, specifying an initial value for the item. * * @param item * the text of the item to be added * @param value * the item's value, to be submitted if it is part of a * {@link FormPanel}; cannot be <code>null</code> */ public void addItem(T item, String value) { values.add(item); listBox.addItem(keyFactory.generateKey(item), value); } /** * Adds an item to the list box, specifying its direction and an initial * value for the item. * * @param item * the text of the item to be added * @param dir * the item's direction * @param value * the item's value, to be submitted if it is part of a * {@link FormPanel}; cannot be <code>null</code> */ public void addItem(T item, Direction dir, String value) { values.add(item); listBox.addItem(keyFactory.generateKey(item), dir, value); } /** * Inserts an item into the list box. Has the same effect as * * <pre> * insertItem(item, item, index) * </pre> * * @param item * the text of the item to be inserted * @param index * the index at which to insert it */ public void insertItem(T item, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), index); } /** * Inserts an item into the list box, specifying its direction. Has the same * effect as * * <pre> * insertItem(item, dir, item, index) * </pre> * * @param item * the text of the item to be inserted * @param dir * the item's direction * @param index * the index at which to insert it */ public void insertItem(T item, Direction dir, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), dir, index); } /** * Inserts an item into the list box, specifying an initial value for the * item. Has the same effect as * * <pre> * insertItem(item, null, value, index) * </pre> * * @param item * the text of the item to be inserted * @param value * the item's value, to be submitted if it is part of a * {@link FormPanel}. * @param index * the index at which to insert it */ public void insertItem(T item, String value, int index) { values.add(index, item); listBox.insertItem(keyFactory.generateKey(item), value, index); } /** * Sets whether an individual list item is selected. * * @param index * the index of the item to be selected or unselected * @param selected * <code>true</code> to select the item * @throws IndexOutOfBoundsException * if the index is out of range */ public void setItemSelected(int index, boolean selected) { listBox.setItemSelected(index, selected); } /** * Sets the text associated with the item at a given index. * * @param index * the index of the item to be set * @param text * the item's new text * @throws IndexOutOfBoundsException * if the index is out of range */ public void setItemText(int index, String text) { listBox.setItemText(index, text); } /** * Sets the text associated with the item at a given index. * * @param index * the index of the item to be set * @param text * the item's new text * @param dir * the item's direction. * @throws IndexOutOfBoundsException * if the index is out of range */ public void setItemText(int index, String text, Direction dir) { listBox.setItemText(index, text, dir); } public void setName(String name) { listBox.setName(name); } /** * Sets the currently selected index. 
* * After calling this method, only the specified item in the list will * remain selected. For a ListBox with multiple selection enabled, see * {@link #setItemSelected(int, boolean)} to select multiple items at a * time. * * @param index * the index of the item to be selected */ public void setSelectedIndex(int index) { listBox.setSelectedIndex(index); } /** * Sets the number of items that are visible. If only one item is visible, * then the box will be displayed as a drop-down list. * * @param visibleItems * the visible item count */ public void setVisibleItemCount(int visibleItems) { listBox.setVisibleItemCount(visibleItems); } /** * Gets the number of items present in the list box. * * @return the number of items */ public int getItemCount() { return listBox.getItemCount(); } /** * Gets the text associated with the item at the specified index. * * @param index * the index of the item whose text is to be retrieved * @return the text associated with the item * @throws IndexOutOfBoundsException * if the index is out of range */ public String getItemText(int index) { return listBox.getItemText(index); } /** * Gets the text for currently selected item. If multiple items are * selected, this method will return the text of the first selected item. * * @return the text for selected item, or {@code null} if none is selected */ public String getSelectedItemText() { return listBox.getSelectedItemText(); } public String getName() { return listBox.getName(); } /** * Gets the currently-selected item. If multiple items are selected, this * method will return the first selected item ({@link #isItemSelected(int)} * can be used to query individual items). * * @return the selected index, or <code>-1</code> if none is selected */ public int getSelectedIndex() { return listBox.getSelectedIndex(); } /** * Gets the value associated with the item at a given index. * * @param index * the index of the item to be retrieved * @return the item's associated value * @throws IndexOutOfBoundsException * if the index is out of range */ public T getValue(int index) { return values.get(index); } /** * Gets the value for currently selected item. If multiple items are * selected, this method will return the value of the first selected item. * * @return the value for selected item, or {@code null} if none is selected */ public T getSelectedValue() { try { return values.get(getSelectedIndex()); } catch (IndexOutOfBoundsException ex) { return null; } } /** * Gets the number of items that are visible. If only one item is visible, * then the box will be displayed as a drop-down list. * * @return the visible item count */ public int getVisibleItemCount() { return listBox.getVisibleItemCount(); } /** * Determines whether an individual list item is selected. * * @param index * the index of the item to be tested * @return <code>true</code> if the item is selected * @throws IndexOutOfBoundsException * if the index is out of range */ public boolean isItemSelected(int index) { return listBox.isItemSelected(index); } /** * Removes the item at the specified index. * * @param index * the index of the item to be removed * @throws IndexOutOfBoundsException * if the index is out of range */ public void removeItem(int index) { values.remove(index); listBox.removeItem(index); } // utility methods /** * Returns all selected values of the list box, or empty array if none. 
* * @return the selected values of the list box */ public String[] getItemsSelected() { List<String> selected = new LinkedList<>(); for (int i = 0; i < listBox.getItemCount(); i++) { if (listBox.isItemSelected(i)) { selected.add(listBox.getValue(i)); } } return selected.toArray(new String[selected.size()]); } /** * Sets the currently selected value. * * After calling this method, only the specified item in the list will * remain selected. For a ListBox with multiple selection enabled, see * {@link #setValueSelected(String, boolean)} to select multiple items at a * time. * * @param value * the value of the item to be selected */ public void setSelectedValue(String value) { int idx = getIndex(value); if (idx >= 0) { setSelectedIndex(idx); } } /** * Gets the index of the specified value. * * @param value * the value of the item to be found * @return the index of the value */ public int getIndex(String value) { int count = getItemCount(); for (int i = 0; i < count; i++) { if (getValue(i).equals(value)) { return i; } } return -1; } /** * Sets whether an individual list value is selected. * * @param value the value of the item to be selected or unselected * @param selected <code>true</code> to select the item */ public void setValueSelected(String value, boolean selected) { int idx = getIndex(value); if (idx >= 0) { setItemSelected(idx, selected); } } /** * Removes a value from the list box. Nothing is done if the value isn't on * the list box. * * @param value the value to be removed from the list */ public void removeValue(String value) { int idx = getIndex(value); if (idx >= 0) { removeItem(idx); } } @Override public void setEnabled(boolean enabled) { listBox.setEnabled(enabled); } /** * Use your own key factory for value keys. */ public void setKeyFactory(KeyFactory<T, String> keyFactory) { this.keyFactory = keyFactory; } }
Final fix - Preserve the initialized variable - fixed for selectedIndex.
gwt-material/src/main/java/gwt/material/design/client/ui/MaterialListValueBox.java
Final fix - Preserve the initialized variable - fixed for selectedIndex.
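The MaterialListValueBox record above keeps its typed values in a parallel List<T> and renders each one through a KeyFactory that defaults to Object::toString and can be swapped via setKeyFactory. A minimal stand-alone sketch of that pattern, using java.util.function.Function in place of the library's KeyFactory (class and method names here are hypothetical, not the gwt-material API):

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

/** Hypothetical sketch: typed values kept in a parallel list, rendered via a key function. */
final class KeyedListSketch<T> {

    private Function<T, String> keyFactory = Object::toString;   // same default as the widget
    private final List<T> values = new ArrayList<>();
    private final List<String> displayedKeys = new ArrayList<>();

    void setKeyFactory(Function<T, String> keyFactory) {
        this.keyFactory = keyFactory;
    }

    void addItem(T item) {
        values.add(item);
        displayedKeys.add(keyFactory.apply(item));               // what the underlying ListBox would show
    }

    T getValue(int index) {
        return values.get(index);                                // typed value back from the selected index
    }

    public static void main(String[] args) {
        KeyedListSketch<Integer> box = new KeyedListSketch<>();
        box.setKeyFactory(i -> "Item #" + i);                    // custom key instead of toString()
        box.addItem(42);
        System.out.println(box.displayedKeys);                   // [Item #42]
        System.out.println(box.getValue(0));                     // 42
    }
}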
Java
apache-2.0
f3ae3fb93de04fb52c4d18840a6780b08c890788
0
donNewtonAlpha/onos,Shashikanth-Huawei/bmp,oplinkoms/onos,kuujo/onos,opennetworkinglab/onos,Shashikanth-Huawei/bmp,sdnwiselab/onos,chenxiuyang/onos,oplinkoms/onos,sonu283304/onos,zsh2938/onos,zsh2938/onos,lsinfo3/onos,packet-tracker/onos,mengmoya/onos,kuujo/onos,rvhub/onos,kkkane/ONOS,y-higuchi/onos,gkatsikas/onos,donNewtonAlpha/onos,packet-tracker/onos,oplinkoms/onos,lsinfo3/onos,kuujo/onos,oplinkoms/onos,maheshraju-Huawei/actn,jinlongliu/onos,jinlongliu/onos,mengmoya/onos,kuujo/onos,LorenzReinhart/ONOSnew,VinodKumarS-Huawei/ietf96yang,kuujo/onos,kkkane/ONOS,maheshraju-Huawei/actn,VinodKumarS-Huawei/ietf96yang,LorenzReinhart/ONOSnew,maheshraju-Huawei/actn,oplinkoms/onos,sdnwiselab/onos,oplinkoms/onos,oeeagle/onos,chinghanyu/onos,LorenzReinhart/ONOSnew,gkatsikas/onos,osinstom/onos,mengmoya/onos,sonu283304/onos,osinstom/onos,y-higuchi/onos,Shashikanth-Huawei/bmp,zsh2938/onos,planoAccess/clonedONOS,osinstom/onos,packet-tracker/onos,kkkane/ONOS,rvhub/onos,lsinfo3/onos,maheshraju-Huawei/actn,chenxiuyang/onos,mengmoya/onos,planoAccess/clonedONOS,LorenzReinhart/ONOSnew,LorenzReinhart/ONOSnew,chinghanyu/onos,chenxiuyang/onos,zsh2938/onos,chinghanyu/onos,oeeagle/onos,donNewtonAlpha/onos,opennetworkinglab/onos,Shashikanth-Huawei/bmp,castroflavio/onos,mengmoya/onos,gkatsikas/onos,osinstom/onos,castroflavio/onos,sonu283304/onos,kuujo/onos,castroflavio/onos,oeeagle/onos,opennetworkinglab/onos,opennetworkinglab/onos,packet-tracker/onos,sonu283304/onos,VinodKumarS-Huawei/ietf96yang,sdnwiselab/onos,sdnwiselab/onos,donNewtonAlpha/onos,gkatsikas/onos,y-higuchi/onos,osinstom/onos,jinlongliu/onos,y-higuchi/onos,sdnwiselab/onos,opennetworkinglab/onos,Shashikanth-Huawei/bmp,VinodKumarS-Huawei/ietf96yang,sdnwiselab/onos,oeeagle/onos,oplinkoms/onos,rvhub/onos,jinlongliu/onos,planoAccess/clonedONOS,opennetworkinglab/onos,rvhub/onos,chinghanyu/onos,gkatsikas/onos,y-higuchi/onos,chenxiuyang/onos,donNewtonAlpha/onos,gkatsikas/onos,lsinfo3/onos,castroflavio/onos,planoAccess/clonedONOS,maheshraju-Huawei/actn,VinodKumarS-Huawei/ietf96yang,kkkane/ONOS,kuujo/onos
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.flowobjective; import com.google.common.annotations.Beta; import org.onosproject.core.ApplicationId; import org.onosproject.net.flow.criteria.Criterion; import java.util.Collection; /** * Represents a filtering flow objective. Each filtering flow objective * is made up of a key (criterion) to a set of criteria. Using this information * a pipeline aware driver will decide how this objective should be mapped * to the specific device pipeline. For example, consider the following * filtering objective: * * portX -&gt; {MAC1, IP1, MAC2} * * The driver could decide to pass L3 packet to the L3 table and L2 packets to * the L2 table for packets arriving on portX. * * Filtering objectives do not only represent what should be permitted into the * pipeline but can also be used to deny or drop unwanted packets by specifying * the appropriate type of filtering objective. It is also important to note * that submitting a filtering objective does not necessarily result in rules * programmed at the switch, the driver is free to decide when these rules are * programmed. For example, a filtering rule may only be programmed once a * corresponding forwarding objective has been received. */ @Beta public interface FilteringObjective extends Objective { enum Type { /** * Enables the filtering condition. */ PERMIT, /** * Disables the filtering condition. */ DENY } /** * Obtain the key for this filter. * * @return a criterion */ Criterion key(); /** * Obtain this filtering type. * * @return the type */ Type type(); /** * The set of conditions the filter must provision at the device. * * @return a collection of criteria */ Collection<Criterion> conditions(); /** * Builder of Filtering objective entities. */ interface Builder extends Objective.Builder { /** * Specify the key for the filter. * * @param key a criterion * @return a filter objective builder */ Builder withKey(Criterion key); /** * Add a filtering condition. * * @param criterion new criterion * @return a filtering builder */ Builder addCondition(Criterion criterion); /** * Permit this filtering condition set. * * @return a filtering builder */ Builder permit(); /** * Deny this filtering condition set. * * @return a filtering builder */ Builder deny(); /** * Assigns an application id. * * @param appId an application id * @return a filtering builder */ Builder fromApp(ApplicationId appId); /** * Builds the filtering objective that will be added. * * @return a filtering objective */ FilteringObjective add(); /** * Builds the filtering objective that will be removed. * * @return a filtering objective. */ FilteringObjective remove(); /** * Builds the filtering objective that will be added. * The context will be used to notify the calling application. * * @param context an objective context * @return a filtering objective */ FilteringObjective add(ObjectiveContext context); /** * Builds the filtering objective that will be removed. 
* The context will be used to notify the calling application. * * @param context an objective context * @return a filtering objective */ FilteringObjective remove(ObjectiveContext context); } }
core/api/src/main/java/org/onosproject/net/flowobjective/FilteringObjective.java
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.flowobjective; import com.google.common.annotations.Beta; import org.onosproject.core.ApplicationId; import org.onosproject.net.flow.criteria.Criterion; import java.util.Collection; /** * Represents a filtering flow objective. Each filtering flow objective * is made up of a key (criterion) to a set of criteria. Using this information * a pipeline aware driver will decide how this objective should be mapped * to the specific device pipeline. For example, consider the following * filtering objective: * * portX -&gt; {MAC1, IP1, MAC2} * * The driver could decide to pass L3 packet to the L3 table and L2 packets to * the L2 table for packets arriving on portX. * * Filtering objectives do not only represent what should be permitted into the * pipeline but can also be used to deny or drop unwanted packets by specifying * the appropriate type of filtering objective. It is also important to note * that submitting a filtering objective does not necessarily result in rules * programmed at the switch, the driver is free to decide when these rules are * programmed. For example, a filtering rule may only be programmed once a * corresponding forwarding objective has been received. */ @Beta public interface FilteringObjective extends Objective { enum Type { /** * Enables the filtering condition. */ PERMIT, /** * Disables the filtering condition. */ DENY } /** * Obtain the key for this filter. * * @return a criterion */ Criterion key(); /** * Obtain this filtering type. * @return the type */ Type type(); /** * The set of conditions the filter must provision at the device. * * @return a collection of criteria */ Collection<Criterion> conditions(); /** * Builder of Filtering objective entities. */ interface Builder extends Objective.Builder { /** * Specify the key for the filter. * * @param key a criterion * @return a filter objective builder */ Builder withKey(Criterion key); /** * Add a filtering condition. * * @param criterion new criterion * @return a filtering builder */ Builder addCondition(Criterion criterion); /** * Permit this filtering condition set. * @return a filtering builder */ Builder permit(); /** * Deny this filtering condition set. * @return a filtering builder */ Builder deny(); /** * Assigns an application id. * @param appId an application id * @return a filtering builder */ Builder fromApp(ApplicationId appId); /** * Builds the filtering objective that will be added. * * @return a filtering objective */ FilteringObjective add(); /** * Builds the filtering objective that will be removed. * * @return a filtering objective. */ FilteringObjective remove(); /** * Builds the filtering objective that will be added. * The context will be used to notify the calling application. * * @param context an objective context * @return a filtering objective */ FilteringObjective add(ObjectiveContext context); /** * Builds the filtering objective that will be removed. 
* The context will be used to notify the calling application. * * @param context an objective context * @return a filtering objective */ FilteringObjective remove(ObjectiveContext context); } }
Follow the convention for Javadoc Change-Id: I32742fdeafaa08171915883c6d6fdc0373db9efa
core/api/src/main/java/org/onosproject/net/flowobjective/FilteringObjective.java
Follow the convention for Javadoc
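The FilteringObjective Javadoc above describes a key criterion mapped to a set of conditions, with PERMIT/DENY semantics and a fluent builder. The following is a self-contained toy model of that contract only, not the ONOS implementation; Strings stand in for Criterion and the class names are invented:

import java.util.ArrayList;
import java.util.List;

/** Toy model of the permit/deny filtering contract; not the ONOS implementation. */
final class FilterSketch {

    enum Type { PERMIT, DENY }

    private final String key;                   // stands in for the key Criterion
    private final List<String> conditions;      // stands in for Collection<Criterion>
    private final Type type;

    private FilterSketch(String key, List<String> conditions, Type type) {
        this.key = key;
        this.conditions = conditions;
        this.type = type;
    }

    static Builder builder() {
        return new Builder();
    }

    static final class Builder {
        private String key;
        private final List<String> conditions = new ArrayList<>();
        private Type type = Type.PERMIT;

        Builder withKey(String key) { this.key = key; return this; }
        Builder addCondition(String criterion) { conditions.add(criterion); return this; }
        Builder permit() { this.type = Type.PERMIT; return this; }
        Builder deny() { this.type = Type.DENY; return this; }
        FilterSketch add() { return new FilterSketch(key, conditions, type); }
    }

    public static void main(String[] args) {
        // "portX -> {MAC1, IP1, MAC2}" from the Javadoc, expressed through the builder
        FilterSketch filter = FilterSketch.builder()
                .withKey("portX")
                .addCondition("MAC1")
                .addCondition("IP1")
                .addCondition("MAC2")
                .permit()
                .add();
        System.out.println(filter.type + " on " + filter.key + ": " + filter.conditions);
    }
}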
Java
apache-2.0
c24c296f9aef8c1234c3a5d33abcabf16ddb00c4
0
pponec/ujorm,pponec/ujorm,pponec/ujorm
/* * Copyright 2020-2020 Pavel Ponec, https://github.com/pponec * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ujorm.tools.web.ajax; import java.time.Duration; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.ujorm.tools.Assert; import org.ujorm.tools.Check; import org.ujorm.tools.web.Element; import org.ujorm.tools.web.Html; import org.ujorm.tools.web.ao.HttpParameter; import org.ujorm.tools.web.ao.Injector; /** * A common Javascript Writer of the Ujorm framework * * @author Pavel Ponec */ public class JavaScriptWriter implements Injector { /** Default AJAX request parameter name */ public static final HttpParameter DEFAULT_AJAX_REQUEST_PARAM = new HttpParameter() { @Override public String toString() { return "_ajax"; } }; /** Default AJAX request parameter name */ public static final HttpParameter DEFAULT_SORT_REQUEST_PARAM = new HttpParameter() { @Override public String toString() { return "_sort"; } }; /** Default duration */ public static final Duration DEFAULT_DURATION = Duration.ofMillis(250); /** Javascript ajax request parameter */ protected final HttpParameter ajaxRequestParam; /** Javascript ajax request parameter */ protected final HttpParameter sortRequestParam; /** Input selectors */ protected final CharSequence[] inputCssSelectors; /** Input idle delay */ @Nonnull protected Duration idleDelay = DEFAULT_DURATION; /** Form selector */ protected String formSelector = "form"; /** On load submit request */ protected boolean onLoadSubmit = false; /** New line characters */ protected CharSequence newLine = "\n"; /** A subtitle selector */ @Nullable protected CharSequence subtitleSelector; /** A subtitle selector */ @Nonnull protected CharSequence errorMessage = "AJAX fails due"; /** Ajax Timeout */ @Nonnull protected Duration ajaxTimeout = Duration.ofMillis(30_000); /** JavaScript version */ protected int version = 1; /** Javascript ajax request parameter */ protected String ajaxRequestPath = "/ajax"; /** Is the table sortable */ protected boolean isSortable = true; /** Function order of name */ protected int fceOrder = 1; public JavaScriptWriter() { this("form input"); } public JavaScriptWriter(@Nonnull CharSequence... inputSelectors) { this(DEFAULT_DURATION, DEFAULT_AJAX_REQUEST_PARAM, DEFAULT_SORT_REQUEST_PARAM, inputSelectors); } public JavaScriptWriter( @Nonnull Duration idleDelay, @Nonnull HttpParameter ajaxRequestParam, @Nonnull HttpParameter sortRequestParam, @Nonnull CharSequence... 
inputSelectors) { this.idleDelay = Assert.notNull(idleDelay, "idleDelay"); this.ajaxRequestParam = Assert.notNull(ajaxRequestParam, "ajaxRequestParam"); this.sortRequestParam = Assert.notNull(sortRequestParam, "sortRequestParam"); this.inputCssSelectors = Assert.hasLength(inputSelectors, "inputSelectors"); } public JavaScriptWriter setFormSelector(String formSelector) { this.formSelector = Assert.notNull(formSelector, "formSelector"); return this; } public JavaScriptWriter setOnLoadSubmit(boolean onLoadSubmit) { this.onLoadSubmit = onLoadSubmit; return this; } public JavaScriptWriter setNewLine(@Nonnull CharSequence newLine) { this.newLine = Assert.notNull(newLine, "newLine"); return this; } /** Assign a subtitle CSS selector */ public JavaScriptWriter setSubtitleSelector(CharSequence subtitleSelector) { this.subtitleSelector = subtitleSelector; return this; } /** Assign an AJAX error message */ public JavaScriptWriter setErrorMessage(@Nullable CharSequence errorMessage) { this.errorMessage = Assert.hasLength(errorMessage, "errorMessage"); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setAjaxTimeout(@Nonnull Duration ajaxTimeout) { this.ajaxTimeout = Assert.notNull(ajaxTimeout, "ajaxTimeout"); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setAjaxRequestPath(@Nonnull String ajaxRequestPath) { this.ajaxRequestPath = ajaxRequestPath; setVersion(2); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setVersion(int version) { this.version = version; return this; } /** Assign a Sortable table */ public JavaScriptWriter setSortable(boolean isSortable) { this.isSortable = isSortable; return this; } /** Set a function order */ public JavaScriptWriter setSortable(int fceOrder) { this.fceOrder = fceOrder; return this; } /** Set a function order name */ public int getFceOrder() { return fceOrder; } /** * Generate a Javascript */ @Override public void write(@Nonnull final Element parent) { try (Element js = parent.addElement(Html.SCRIPT)) { js.addRawText(newLine); js.addRawText("var f", fceOrder, "=function(){"); if (Check.hasLength(inputCssSelectors)) { final String inpSelectors = Stream.of(inputCssSelectors) .collect(Collectors.joining(", ")); js.addRawTexts(newLine, "" , "var timeout=null, ajaxRun=false, submitReq=false;" , "$('" + inpSelectors + "').keyup(function(){" , " if (timeout){" , " clearTimeout(timeout);" , " }" , " timeout=setTimeout(function(){" , " timeout=null;" , " if(ajaxRun){submitReq=true;}" , " else{$('" + formSelector + "').submit();}" , " }, " + idleDelay.toMillis() + ");" , "});" ); } { js.addRawTexts(newLine, "" , "$('form').submit(function(event){" , " event.preventDefault();" , " ajaxRun=true;" , " var data=$('" + formSelector + "').serialize();" , " $.ajax(" + (version == 2 ? ("{ url:'" + ajaxRequestPath + "'") : ("{ url:'?" + ajaxRequestParam + "=true'")) + ", type:'POST'" + ", data:data" + ", timeout:" + ajaxTimeout.toMillis() + ", error:function(xhr,ajaxOptions,thrownError){", Check.hasLength(subtitleSelector) ? 
" ajaxRun=false;" + " $('" + subtitleSelector + "').html('" + errorMessage + ":' + thrownError);":"" , " }" + ", success:function(result){" , " var jsn=JSON.parse(result);" , " $.each(jsn,function(key,value){" , " $(key).html(value);" , " }); " , " if(submitReq){submitReq=false; $('" + formSelector + "').submit();} " , " else{ajaxRun=false;}" , " }});" , "});" ); if (onLoadSubmit) { js.addRawText(newLine, " $('" + formSelector + "').submit();"); } } js.addRawText("};"); if (isSortable) { js.addRawText(newLine, "f1.sort=function(col){"); js.addRawText(newLine, " document.querySelector('", "input[name=\"", sortRequestParam, "\"]').value=col;"); js.addRawText(newLine, " if(this.ajaxRun){this.submitReq=true;}"); js.addRawText(newLine, " else{document.querySelector('", formSelector , "').submit();}"); js.addRawText(newLine, "};"); } js.addRawText("$(document).ready(f", fceOrder, ");"); } } }
project-m2/ujo-web/src/main/java/org/ujorm/tools/web/ajax/JavaScriptWriter.java
/* * Copyright 2020-2020 Pavel Ponec, https://github.com/pponec * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ujorm.tools.web.ajax; import java.time.Duration; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.ujorm.tools.Assert; import org.ujorm.tools.Check; import org.ujorm.tools.web.Element; import org.ujorm.tools.web.Html; import org.ujorm.tools.web.ao.HttpParameter; import org.ujorm.tools.web.ao.Injector; /** * A common Javascript Writer of the Ujorm framework * * @author Pavel Ponec */ public class JavaScriptWriter implements Injector { /** Default AJAX request parameter name */ public static final HttpParameter DEFAULT_AJAX_REQUEST_PARAM = new HttpParameter() { @Override public String toString() { return "_ajax"; } }; /** Default AJAX request parameter name */ public static final HttpParameter DEFAULT_SORT_REQUEST_PARAM = new HttpParameter() { @Override public String toString() { return "_sort"; } }; /** Default duration */ public static final Duration DEFAULT_DURATION = Duration.ofMillis(250); /** Javascript ajax request parameter */ protected final HttpParameter ajaxRequestParam; /** Javascript ajax request parameter */ protected final HttpParameter sortRequestParam; /** Input selectors */ protected final CharSequence[] inputCssSelectors; /** Input idle delay */ @Nonnull protected Duration idleDelay = DEFAULT_DURATION; /** Form selector */ protected String formSelector = "form"; /** On load submit request */ protected boolean onLoadSubmit = false; /** New line characters */ protected CharSequence newLine = "\n"; /** A subtitle selector */ @Nullable protected CharSequence subtitleSelector; /** A subtitle selector */ @Nonnull protected CharSequence errorMessage = "AJAX fails due"; /** Ajax Timeout */ @Nonnull protected Duration ajaxTimeout = Duration.ofMillis(30_000); /** JavaScript version */ protected int version = 1; /** Javascript ajax request parameter */ protected String ajaxRequestPath = "/ajax"; /** Is the table sortable */ protected boolean isSortable = true; /** Function order of name */ protected int fceOrder = 1; public JavaScriptWriter() { this("form input"); } public JavaScriptWriter(@Nonnull CharSequence... inputSelectors) { this(DEFAULT_DURATION, DEFAULT_AJAX_REQUEST_PARAM, DEFAULT_SORT_REQUEST_PARAM, inputSelectors); } public JavaScriptWriter( @Nonnull Duration idleDelay, @Nonnull HttpParameter ajaxRequestParam, @Nonnull HttpParameter sortRequestParam, @Nonnull CharSequence... 
inputSelectors) { this.idleDelay = Assert.notNull(idleDelay, "idleDelay"); this.ajaxRequestParam = Assert.notNull(ajaxRequestParam, "ajaxRequestParam"); this.sortRequestParam = Assert.notNull(ajaxRequestParam, "sortRequestParam"); this.inputCssSelectors = Assert.hasLength(inputSelectors, "inputSelectors"); } public JavaScriptWriter setFormSelector(String formSelector) { this.formSelector = Assert.notNull(formSelector, "formSelector"); return this; } public JavaScriptWriter setOnLoadSubmit(boolean onLoadSubmit) { this.onLoadSubmit = onLoadSubmit; return this; } public JavaScriptWriter setNewLine(@Nonnull CharSequence newLine) { this.newLine = Assert.notNull(newLine, "newLine"); return this; } /** Assign a subtitle CSS selector */ public JavaScriptWriter setSubtitleSelector(CharSequence subtitleSelector) { this.subtitleSelector = subtitleSelector; return this; } /** Assign an AJAX error message */ public JavaScriptWriter setErrorMessage(@Nullable CharSequence errorMessage) { this.errorMessage = Assert.hasLength(errorMessage, "errorMessage"); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setAjaxTimeout(@Nonnull Duration ajaxTimeout) { this.ajaxTimeout = Assert.notNull(ajaxTimeout, "ajaxTimeout"); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setAjaxRequestPath(@Nonnull String ajaxRequestPath) { this.ajaxRequestPath = ajaxRequestPath; setVersion(2); return this; } /** Assign an AJAX timeout */ public JavaScriptWriter setVersion(int version) { this.version = version; return this; } /** Assign a Sortable table */ public JavaScriptWriter setSortable(boolean isSortable) { this.isSortable = isSortable; return this; } /** Set a function order */ public JavaScriptWriter setSortable(int fceOrder) { this.fceOrder = fceOrder; return this; } /** Set a function order name */ public int getFceOrder() { return fceOrder; } /** * Generate a Javascript */ @Override public void write(@Nonnull final Element parent) { try (Element js = parent.addElement(Html.SCRIPT)) { js.addRawText(newLine); js.addRawText("var f", fceOrder, "=function(){"); if (Check.hasLength(inputCssSelectors)) { final String inpSelectors = Stream.of(inputCssSelectors) .collect(Collectors.joining(", ")); js.addRawTexts(newLine, "" , "var timeout=null, ajaxRun=false, submitReq=false;" , "$('" + inpSelectors + "').keyup(function(){" , " if (timeout){" , " clearTimeout(timeout);" , " }" , " timeout=setTimeout(function(){" , " timeout=null;" , " if(ajaxRun){submitReq=true;}" , " else{$('" + formSelector + "').submit();}" , " }, " + idleDelay.toMillis() + ");" , "});" ); } { js.addRawTexts(newLine, "" , "$('form').submit(function(event){" , " event.preventDefault();" , " ajaxRun=true;" , " var data=$('" + formSelector + "').serialize();" , " $.ajax(" + (version == 2 ? ("{ url:'" + ajaxRequestPath + "'") : ("{ url:'?" + ajaxRequestParam + "=true'")) + ", type:'POST'" + ", data:data" + ", timeout:" + ajaxTimeout.toMillis() + ", error:function(xhr,ajaxOptions,thrownError){", Check.hasLength(subtitleSelector) ? 
" ajaxRun=false;" + " $('" + subtitleSelector + "').html('" + errorMessage + ":' + thrownError);":"" , " }" + ", success:function(result){" , " var jsn=JSON.parse(result);" , " $.each(jsn,function(key,value){" , " $(key).html(value);" , " }); " , " if(submitReq){submitReq=false; $('" + formSelector + "').submit();} " , " else{ajaxRun=false;}" , " }});" , "});" ); if (onLoadSubmit) { js.addRawText(newLine, " $('" + formSelector + "').submit();"); } if (isSortable) { js.addRawText(newLine, "function sort(col){"); js.addRawText(newLine, " document.querySelector('", "input[name=\"", ajaxRequestParam, "\"]').value=col;"); js.addRawText(newLine, " if(ajaxRun){submitReq=true;}"); js.addRawText(newLine, " else{document.querySelector('", formSelector , "').submit();}"); js.addRawText(newLine, "}"); } } js.addRawText("};"); js.addRawText("$(document).ready(f", fceOrder, ");"); } } }
Sample: JS AJAX code fixing
project-m2/ujo-web/src/main/java/org/ujorm/tools/web/ajax/JavaScriptWriter.java
Sample: JS AJAX code fixing
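This row records a change to JavaScriptWriter.write(...): the old version emitted a page-global function sort(col) that wrote the chosen column into the input named after ajaxRequestParam, while the new version attaches the handler to the generated function object (f1.sort = function(col){...}) and writes the column into the input named after sortRequestParam before submitting or deferring the form. A minimal usage sketch of the writer follows; the Element instance and the CSS selector values are illustrative assumptions, only the class and method names come from the sample above.

```java
// Hypothetical usage sketch; "body" and the selector strings are assumed, not taken from the sample.
import org.ujorm.tools.web.Element;
import org.ujorm.tools.web.ajax.JavaScriptWriter;

public class JavaScriptWriterUsageSketch {

    /** Emits the AJAX and sort script into a script element appended to the given parent. */
    public static void writeAjaxScript(Element body) {
        new JavaScriptWriter("#regexp", "#text")  // watched input selectors (assumed values)
                .setFormSelector("#form")         // form submitted via AJAX (assumed value)
                .setSubtitleSelector(".subtitle") // target for AJAX error messages (assumed value)
                .setSortable(true)                // emit the f1.sort(col) helper of the new version
                .write(body);                     // appends the <script> element to "body"
    }
}
```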
Java
apache-2.0
caf14137c1d8d296fc20976a7b34a0da8a6471ef
0
jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse,jenmalloy/enmasse,EnMasseProject/enmasse
package io.enmasse.systemtest.authz; import io.enmasse.systemtest.*; import io.enmasse.systemtest.amqp.AmqpClient; import org.apache.qpid.proton.message.Message; import org.junit.Before; import java.util.Collections; import java.util.List; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public abstract class AuthorizationTestBase extends TestBaseWithDefault { private static final Destination queue = Destination.queue("authz-queue"); private static final Destination topic = Destination.topic("authz-topic"); private static final Destination anycast = Destination.anycast("authz-anycast"); private static final Destination multicast = Destination.multicast("authz-multicast"); @Before public void initAddresses() throws Exception { setAddresses(defaultAddressSpace, queue, topic); if(getAddressSpaceType() == AddressSpaceType.STANDARD){ setAddresses(defaultAddressSpace, anycast, multicast); } } protected void doTestSendAuthz() throws Exception { KeycloakCredentials allowedUser = new KeycloakCredentials("sender", "senderPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), allowedUser.getUsername(), allowedUser.getPassword(), Group.SEND_ALL.toString()); assertSend(allowedUser.getUsername(), allowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), allowedUser.getUsername()); KeycloakCredentials noAllowedUser = new KeycloakCredentials("nobody", "nobodyPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), noAllowedUser.getUsername(), noAllowedUser.getPassword(), Group.RECV_ALL.toString()); assertCannotSend(noAllowedUser.getUsername(), noAllowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), noAllowedUser.getUsername()); } protected void doTestSendReceiveAuthz() throws Exception { KeycloakCredentials allowedUser = new KeycloakCredentials("receiver", "receiverPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), allowedUser.getUsername(), allowedUser.getPassword(), Group.SEND_ALL.toString(), Group.RECV_ALL.toString()); assertReceive(allowedUser.getUsername(), allowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), allowedUser.getUsername()); KeycloakCredentials noAllowedUser = new KeycloakCredentials("nobody", "nobodyPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), noAllowedUser.getUsername(), noAllowedUser.getPassword(), Group.SEND_ALL.toString()); assertCannotReceive(noAllowedUser.getUsername(), noAllowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), noAllowedUser.getUsername()); } private void assertSend(String username, String password) throws Exception { assertTrue(canSend(queue, username, password)); assertTrue(canSend(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertTrue(canSend(multicast, username, password)); assertTrue(canSend(anycast, username, password)); } } private void assertCannotSend(String username, String password) throws Exception { assertFalse(canSend(queue, username, password)); assertFalse(canSend(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertFalse(canSend(multicast, username, password)); assertFalse(canSend(anycast, username, password)); } } private void assertReceive(String username, String password) throws Exception { assertTrue(canReceive(queue, username, password)); 
assertTrue(canReceive(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertTrue(canReceive(multicast, username, password)); assertTrue(canReceive(anycast, username, password)); } } private void assertCannotReceive(String username, String password) throws Exception { assertFalse(canReceive(queue, username, password)); assertFalse(canReceive(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertFalse(canReceive(multicast, username, password)); assertFalse(canReceive(anycast, username, password)); } } private boolean canSend(Destination destination, String username, String password) throws Exception { AmqpClient client = createClient(destination, username, password); return client.sendMessages(destination.getAddress(), Collections.singletonList("msg1"), 10, TimeUnit.SECONDS).get(30, TimeUnit.SECONDS) == 1; } private boolean canReceive(Destination destination, String username, String password) throws Exception { AmqpClient client = createClient(destination, username, password); Future<List<Message>> received = client.recvMessages(destination.getAddress(), 1, 10, TimeUnit.SECONDS); Future<Integer> sent = client.sendMessages(destination.getAddress(), Collections.singletonList("msg1"), 10, TimeUnit.SECONDS); return received.get(1, TimeUnit.MINUTES).size() == sent.get(1, TimeUnit.MINUTES); } private AmqpClient createClient(Destination dest, String username, String password) throws Exception { AmqpClient client = null; switch (dest.getType()) { case "queue": client = amqpClientFactory.createQueueClient(defaultAddressSpace); break; case "topic": client = amqpClientFactory.createTopicClient(defaultAddressSpace); break; case "anycast": client = amqpClientFactory.createQueueClient(defaultAddressSpace); break; case "multicast": client = amqpClientFactory.createBroadcastClient(defaultAddressSpace); break; } client.getConnectOptions().setUsername(username).setPassword(password); return client; } }
systemtests/src/test/java/io/enmasse/systemtest/authz/AuthorizationTestBase.java
package io.enmasse.systemtest.authz; import io.enmasse.systemtest.*; import io.enmasse.systemtest.amqp.AmqpClient; import org.apache.qpid.proton.message.Message; import org.junit.Before; import java.util.Collections; import java.util.List; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public abstract class AuthorizationTestBase extends TestBaseWithDefault { private static final Destination queue = Destination.queue("authz-queue"); private static final Destination topic = Destination.topic("authz-topic"); private static final Destination anycast = Destination.anycast("authz-anycast"); private static final Destination multicast = Destination.multicast("authz-multicast"); @Before public void initAddresses() throws Exception { setAddresses(defaultAddressSpace, queue, topic, anycast, multicast); } protected void doTestSendAuthz() throws Exception { KeycloakCredentials allowedUser = new KeycloakCredentials("sender", "senderPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), allowedUser.getUsername(), allowedUser.getPassword(), Group.SEND_ALL.toString()); assertSend(allowedUser.getUsername(), allowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), allowedUser.getUsername()); KeycloakCredentials noAllowedUser = new KeycloakCredentials("nobody", "nobodyPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), noAllowedUser.getUsername(), noAllowedUser.getPassword(), Group.RECV_ALL.toString()); assertCannotSend(noAllowedUser.getUsername(), noAllowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), noAllowedUser.getUsername()); } protected void doTestSendReceiveAuthz() throws Exception { KeycloakCredentials allowedUser = new KeycloakCredentials("receiver", "receiverPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), allowedUser.getUsername(), allowedUser.getPassword(), Group.SEND_ALL.toString(), Group.RECV_ALL.toString()); assertReceive(allowedUser.getUsername(), allowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), allowedUser.getUsername()); KeycloakCredentials noAllowedUser = new KeycloakCredentials("nobody", "nobodyPa55"); getKeycloakClient().createUser(defaultAddressSpace.getName(), noAllowedUser.getUsername(), noAllowedUser.getPassword(), Group.SEND_ALL.toString()); assertCannotReceive(noAllowedUser.getUsername(), noAllowedUser.getPassword()); getKeycloakClient().deleteUser(defaultAddressSpace.getName(), noAllowedUser.getUsername()); } private void assertSend(String username, String password) throws Exception { assertTrue(canSend(queue, username, password)); assertTrue(canSend(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertTrue(canSend(multicast, username, password)); assertTrue(canSend(anycast, username, password)); } } private void assertCannotSend(String username, String password) throws Exception { assertFalse(canSend(queue, username, password)); assertFalse(canSend(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertFalse(canSend(multicast, username, password)); assertFalse(canSend(anycast, username, password)); } } private void assertReceive(String username, String password) throws Exception { assertTrue(canReceive(queue, username, password)); assertTrue(canReceive(topic, username, password)); if (getAddressSpaceType() == 
AddressSpaceType.STANDARD) { assertTrue(canReceive(multicast, username, password)); assertTrue(canReceive(anycast, username, password)); } } private void assertCannotReceive(String username, String password) throws Exception { assertFalse(canReceive(queue, username, password)); assertFalse(canReceive(topic, username, password)); if (getAddressSpaceType() == AddressSpaceType.STANDARD) { assertFalse(canReceive(multicast, username, password)); assertFalse(canReceive(anycast, username, password)); } } private boolean canSend(Destination destination, String username, String password) throws Exception { AmqpClient client = createClient(destination, username, password); return client.sendMessages(destination.getAddress(), Collections.singletonList("msg1"), 10, TimeUnit.SECONDS).get(30, TimeUnit.SECONDS) == 1; } private boolean canReceive(Destination destination, String username, String password) throws Exception { AmqpClient client = createClient(destination, username, password); Future<List<Message>> received = client.recvMessages(destination.getAddress(), 1, 10, TimeUnit.SECONDS); Future<Integer> sent = client.sendMessages(destination.getAddress(), Collections.singletonList("msg1"), 10, TimeUnit.SECONDS); return received.get(1, TimeUnit.MINUTES).size() == sent.get(1, TimeUnit.MINUTES); } private AmqpClient createClient(Destination dest, String username, String password) throws Exception { AmqpClient client = null; switch (dest.getType()) { case "queue": client = amqpClientFactory.createQueueClient(defaultAddressSpace); break; case "topic": client = amqpClientFactory.createTopicClient(defaultAddressSpace); break; case "anycast": client = amqpClientFactory.createQueueClient(defaultAddressSpace); break; case "multicast": client = amqpClientFactory.createBroadcastClient(defaultAddressSpace); break; } client.getConnectOptions().setUsername(username).setPassword(password); return client; } }
FIX multicast, anycast only in standard
systemtests/src/test/java/io/enmasse/systemtest/authz/AuthorizationTestBase.java
FIX multicast, anycast only in standard
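The fix captured by this row is a single guard in initAddresses(): anycast and multicast destinations are only created when the address space under test is of type STANDARD, while queue and topic stay available for every address space type. The excerpt below is taken directly from the new_contents above.

```java
// Excerpt from the updated initAddresses(): queue and topic are set up everywhere,
// anycast and multicast only for the STANDARD address space.
@Before
public void initAddresses() throws Exception {
    setAddresses(defaultAddressSpace, queue, topic);
    if (getAddressSpaceType() == AddressSpaceType.STANDARD) {
        setAddresses(defaultAddressSpace, anycast, multicast);
    }
}
```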
Java
apache-2.0
27f429a51d1c723e34aa1918ff2b0f8da41d58b3
0
eboudrant/net.ebt.muzei.miyazaki,eboudrant/net.ebt.muzei.miyazaki,eboudrant/net.ebt.muzei.miyazaki
package net.ebt.muzei.miyazaki.activity; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.Settings; import android.support.v4.app.FragmentActivity; import android.view.View; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.FrameLayout; import android.widget.SeekBar; import android.widget.TextView; import android.widget.Toast; import net.ebt.muzei.miyazaki.BuildConfig; import net.ebt.muzei.miyazaki.R; import net.ebt.muzei.miyazaki.app.MuzeiMiyazakiApplication; import net.ebt.muzei.miyazaki.model.Artwork; import net.ebt.muzei.miyazaki.service.MuzeiMiyazakiService; import net.ebt.muzei.miyazaki.util.UiUtils; import net.ebt.muzei.miyazaki.util.Utils; import java.util.List; import java.util.logging.Level; import static net.ebt.muzei.miyazaki.Constants.ACTION_RELOAD; import static net.ebt.muzei.miyazaki.Constants.CURRENT_PREF_NAME; import static net.ebt.muzei.miyazaki.Constants.DEFAULT_INTERVAL; import static net.ebt.muzei.miyazaki.Constants.INTERVALS; import static net.ebt.muzei.miyazaki.Constants.MUZEI_COLOR; import static net.ebt.muzei.miyazaki.Constants.MUZEI_FRAME; import static net.ebt.muzei.miyazaki.Constants.MUZEI_INTERVAL; import static net.ebt.muzei.miyazaki.Constants.MUZEI_WIFI; public class MuzeiMiyazakiSettings extends FragmentActivity { private static final String TAG = "MuzeiMiyazakiSettings"; private static final float ALPHA_DEACTIVATED = 0.3f; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); final String muzeiPackageId = Build.VERSION.SDK_INT > Build.VERSION_CODES.JELLY_BEAN ? 
"net.nurik.roman.muzei" : "net.nurik.roman.muzei.muik"; final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); if (getIntent() != null && getIntent().getAction() != null && getIntent().getAction().equals(Intent.ACTION_MAIN)) { try { Intent launchIntent = getPackageManager().getLaunchIntentForPackage(muzeiPackageId); if (launchIntent != null) { if (!settings.getBoolean("onboarding", false)) { Toast.makeText(this, getResources().getString(R.string.setup_muzei), Toast.LENGTH_LONG).show(); } launchIntent.addCategory(Intent.CATEGORY_DEFAULT); launchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED); startActivity(launchIntent); finish(); return; } else { Toast.makeText(this, getResources().getString(R.string.install_muzei), Toast.LENGTH_LONG).show(); Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse("market://details?id=" + muzeiPackageId)); startActivity(intent); finish(); return; } } catch (Throwable e) { // No playstore } } setContentView(R.layout.settings); int interval = settings.getInt(MUZEI_INTERVAL, DEFAULT_INTERVAL); final View colors = findViewById(R.id.colors); final SeekBar seekBar = (SeekBar) findViewById(R.id.muzei_interval); final CheckBox wifi = (CheckBox) findViewById(R.id.muzei_wifi); final TextView configLabel = (TextView) findViewById(R.id.muzei_config_label); final TextView label = (TextView) findViewById(R.id.muzei_label); configLabel.setText("Refresh every " + Utils.formatDuration(INTERVALS.get(interval))); seekBar.setMax(INTERVALS.size() - 1); seekBar.setProgress(interval); seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { label.setText(Utils.formatDuration(INTERVALS.get(progress)).toUpperCase()); } @Override public void onStartTrackingTouch(SeekBar seekBar) { configLabel.setVisibility(View.INVISIBLE); colors.setVisibility(View.GONE); label.setVisibility(View.VISIBLE); } @Override public void onStopTrackingTouch(SeekBar seekBar) { label.setText(null); label.setVisibility(View.GONE); configLabel.setText("Refresh every " + Utils.formatDuration(INTERVALS.get(seekBar.getProgress()))); configLabel.setVisibility(View.VISIBLE); colors.setVisibility(View.VISIBLE); SharedPreferences.Editor editor = settings.edit(); editor.putInt(MUZEI_INTERVAL, seekBar.getProgress()); editor.commit(); Intent intent = new Intent(MuzeiMiyazakiService.ACTION_RESCHEDULE); intent.setClass(seekBar.getContext(), MuzeiMiyazakiService.class); startService(intent); } }); wifi.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { SharedPreferences.Editor editor = settings.edit(); editor.putBoolean(MUZEI_WIFI, isChecked); editor.commit(); } }); wifi.setChecked(settings.getBoolean(MUZEI_WIFI, true)); label.setVisibility(View.GONE); findViewById(R.id.seeall).setVisibility(View.GONE); findViewById(R.id.seeall).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse("http://muzeighibli.net?jsessionid=" + String.valueOf(System.currentTimeMillis()).hashCode() + "&a=" + Settings.Secure.getString(MuzeiMiyazakiSettings.this.getContentResolver(), Settings.Secure.ANDROID_ID).hashCode())); startActivity(intent); UiUtils.makeToast(MuzeiMiyazakiSettings.this, 
R.string.help_captions, Level.INFO); } catch (ActivityNotFoundException e) { UiUtils.makeToast(MuzeiMiyazakiSettings.this, R.string.install_chrome, Level.INFO); } } }); } @Override protected void onResume() { super.onResume(); updateMatches(getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE)); } public void onColor(View view) { if (view instanceof FrameLayout) { view = FrameLayout.class.cast(view).getChildAt(0); } final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); boolean remove = false; String color = settings.getString(MUZEI_COLOR, ""); findViewById(R.id.black).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.maroon).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.navy).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.teal).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.green).setAlpha(ALPHA_DEACTIVATED); if (view.getId() == R.id.black) { if ("black".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "black").commit(); } else if (view.getId() == R.id.grey) { if ("grey".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "grey").commit(); } else if (view.getId() == R.id.silver) { if ("silver".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "silver").commit(); } else if (view.getId() == R.id.maroon) { if ("maroon".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "maroon").commit(); } else if (view.getId() == R.id.olive) { if ("olive".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "olive").commit(); } else if (view.getId() == R.id.green) { if ("green".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "green").commit(); } else if (view.getId() == R.id.teal) { if ("teal".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "teal").commit(); } else if (view.getId() == R.id.navy) { if ("navy".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "navy").commit(); } else if (view.getId() == R.id.purple) { if ("purple".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "purple").commit(); } if (!remove) { Intent intent = new Intent(ACTION_RELOAD); intent.setClass(this, MuzeiMiyazakiService.class); startService(intent); } updateMatches(settings); if (!remove) { view.setAlpha(1.0f); } } private void updateMatches(SharedPreferences settings) { String frame = settings.getString(MUZEI_FRAME, null); String color = settings.getString(MUZEI_COLOR, null); int matches = 0; boolean ok; List<Artwork> artworks = MuzeiMiyazakiApplication.getInstance().getArtworks(); if (artworks != null) { for (Artwork artwork : artworks) { ok = false; if (color == null || artwork.colors.get(color) > MuzeiMiyazakiApplication.getInstance().get(color)) ok = true; if (ok && frame != null) { if ("portrait".equals(frame)) { ok = artwork.ratio < 1.0f; } else if ("ultra_wide".equals(frame)) { ok = artwork.ratio > 3.0f; } else if ("wide".equals(frame)) { ok = artwork.ratio >= 1.0f && 
artwork.ratio <= 3.0f; } } if (ok) matches++; } if (BuildConfig.DEBUG) { ((TextView) findViewById(R.id.matches)).setText("Using " + matches + " artworks (" + MuzeiMiyazakiApplication.getInstance().getPercentWithCaption() + "%)"); } else { ((TextView) findViewById(R.id.matches)).setText("Using " + matches + " artworks"); } findViewById(R.id.black).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.maroon).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.navy).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.teal).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.green).setAlpha(ALPHA_DEACTIVATED); if ("black".equals(color)) findViewById(R.id.black).setAlpha(1.0f); if ("maroon".equals(color)) findViewById(R.id.maroon).setAlpha(1.0f); if ("navy".equals(color)) findViewById(R.id.navy).setAlpha(1.0f); if ("teal".equals(color)) findViewById(R.id.teal).setAlpha(1.0f); if ("green".equals(color)) findViewById(R.id.green).setAlpha(1.0f); } } public void onFrameLayout(View view) { final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); String frame = settings.getString(MUZEI_FRAME, null); if (view.getId() == R.id.frame_ultra_wide) { if ("ultra_wide".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "ultra_wide").commit(); } else if (view.getId() == R.id.frame_portrait) { if ("portrait".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "portrait").commit(); } else if (view.getId() == R.id.frame_wide) { if ("wide".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "wide").commit(); } Intent intent = new Intent(ACTION_RELOAD); intent.setClass(this, MuzeiMiyazakiService.class); startService(intent); updateMatches(settings); } }
app/src/main/java/net/ebt/muzei/miyazaki/activity/MuzeiMiyazakiSettings.java
package net.ebt.muzei.miyazaki.activity; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.provider.Settings; import android.support.v4.app.FragmentActivity; import android.view.View; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.FrameLayout; import android.widget.SeekBar; import android.widget.TextView; import android.widget.Toast; import net.ebt.muzei.miyazaki.BuildConfig; import net.ebt.muzei.miyazaki.R; import net.ebt.muzei.miyazaki.app.MuzeiMiyazakiApplication; import net.ebt.muzei.miyazaki.model.Artwork; import net.ebt.muzei.miyazaki.service.MuzeiMiyazakiService; import net.ebt.muzei.miyazaki.util.UiUtils; import net.ebt.muzei.miyazaki.util.Utils; import java.util.List; import java.util.logging.Level; import static net.ebt.muzei.miyazaki.Constants.ACTION_RELOAD; import static net.ebt.muzei.miyazaki.Constants.CURRENT_PREF_NAME; import static net.ebt.muzei.miyazaki.Constants.DEFAULT_INTERVAL; import static net.ebt.muzei.miyazaki.Constants.INTERVALS; import static net.ebt.muzei.miyazaki.Constants.MUZEI_COLOR; import static net.ebt.muzei.miyazaki.Constants.MUZEI_FRAME; import static net.ebt.muzei.miyazaki.Constants.MUZEI_INTERVAL; import static net.ebt.muzei.miyazaki.Constants.MUZEI_WIFI; public class MuzeiMiyazakiSettings extends FragmentActivity { private static final String TAG = "MuzeiMiyazakiSettings"; private static final float ALPHA_DEACTIVATED = 0.3f; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); final String muzeiPackageId = Build.VERSION.SDK_INT > Build.VERSION_CODES.JELLY_BEAN ? 
"net.nurik.roman.muzei" : "net.nurik.roman.muzei.muik"; final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); if (getIntent() != null && getIntent().getAction() != null && getIntent().getAction().equals(Intent.ACTION_MAIN)) { try { Intent launchIntent = getPackageManager().getLaunchIntentForPackage(muzeiPackageId); if (launchIntent != null) { if (!settings.getBoolean("onboarding", false)) { Toast.makeText(this, getResources().getString(R.string.setup_muzei), Toast.LENGTH_LONG).show(); } launchIntent.addCategory(Intent.CATEGORY_DEFAULT); launchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED); startActivity(launchIntent); finish(); return; } else { Toast.makeText(this, getResources().getString(R.string.install_muzei), Toast.LENGTH_LONG).show(); Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse("market://details?id=" + muzeiPackageId)); startActivity(intent); finish(); return; } } catch (Throwable e) { // No playstore } } setContentView(R.layout.settings); int interval = settings.getInt(MUZEI_INTERVAL, DEFAULT_INTERVAL); final View colors = findViewById(R.id.colors); final SeekBar seekBar = (SeekBar) findViewById(R.id.muzei_interval); final CheckBox wifi = (CheckBox) findViewById(R.id.muzei_wifi); final TextView configLabel = (TextView) findViewById(R.id.muzei_config_label); final TextView label = (TextView) findViewById(R.id.muzei_label); configLabel.setText("Refresh every " + Utils.formatDuration(INTERVALS.get(interval))); seekBar.setMax(INTERVALS.size() - 1); seekBar.setProgress(interval); seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { label.setText(Utils.formatDuration(INTERVALS.get(progress)).toUpperCase()); } @Override public void onStartTrackingTouch(SeekBar seekBar) { configLabel.setVisibility(View.INVISIBLE); colors.setVisibility(View.GONE); label.setVisibility(View.VISIBLE); } @Override public void onStopTrackingTouch(SeekBar seekBar) { label.setText(null); label.setVisibility(View.GONE); configLabel.setText("Refresh every " + Utils.formatDuration(INTERVALS.get(seekBar.getProgress()))); configLabel.setVisibility(View.VISIBLE); colors.setVisibility(View.VISIBLE); SharedPreferences.Editor editor = settings.edit(); editor.putInt(MUZEI_INTERVAL, seekBar.getProgress()); editor.commit(); Intent intent = new Intent(MuzeiMiyazakiService.ACTION_RESCHEDULE); intent.setClass(seekBar.getContext(), MuzeiMiyazakiService.class); startService(intent); } }); wifi.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { SharedPreferences.Editor editor = settings.edit(); editor.putBoolean(MUZEI_WIFI, isChecked); editor.commit(); } }); wifi.setChecked(settings.getBoolean(MUZEI_WIFI, true)); label.setVisibility(View.GONE); findViewById(R.id.seeall).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { try { Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse("http://muzeighibli.net?jsessionid=" + String.valueOf(System.currentTimeMillis()).hashCode() + "&a=" + Settings.Secure.getString(MuzeiMiyazakiSettings.this.getContentResolver(), Settings.Secure.ANDROID_ID).hashCode())); startActivity(intent); UiUtils.makeToast(MuzeiMiyazakiSettings.this, R.string.help_captions, Level.INFO); } catch 
(ActivityNotFoundException e) { UiUtils.makeToast(MuzeiMiyazakiSettings.this, R.string.install_chrome, Level.INFO); } } }); } @Override protected void onResume() { super.onResume(); updateMatches(getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE)); } public void onColor(View view) { if (view instanceof FrameLayout) { view = FrameLayout.class.cast(view).getChildAt(0); } final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); boolean remove = false; String color = settings.getString(MUZEI_COLOR, ""); findViewById(R.id.black).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.maroon).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.navy).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.teal).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.green).setAlpha(ALPHA_DEACTIVATED); if (view.getId() == R.id.black) { if ("black".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "black").commit(); } else if (view.getId() == R.id.grey) { if ("grey".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "grey").commit(); } else if (view.getId() == R.id.silver) { if ("silver".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "silver").commit(); } else if (view.getId() == R.id.maroon) { if ("maroon".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "maroon").commit(); } else if (view.getId() == R.id.olive) { if ("olive".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "olive").commit(); } else if (view.getId() == R.id.green) { if ("green".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "green").commit(); } else if (view.getId() == R.id.teal) { if ("teal".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "teal").commit(); } else if (view.getId() == R.id.navy) { if ("navy".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "navy").commit(); } else if (view.getId() == R.id.purple) { if ("purple".equals(color)) { settings.edit().remove(MUZEI_COLOR).commit(); remove = true; } else settings.edit().putString(MUZEI_COLOR, "purple").commit(); } if (!remove) { Intent intent = new Intent(ACTION_RELOAD); intent.setClass(this, MuzeiMiyazakiService.class); startService(intent); } updateMatches(settings); if (!remove) { view.setAlpha(1.0f); } } private void updateMatches(SharedPreferences settings) { String frame = settings.getString(MUZEI_FRAME, null); String color = settings.getString(MUZEI_COLOR, null); int matches = 0; boolean ok; List<Artwork> artworks = MuzeiMiyazakiApplication.getInstance().getArtworks(); if (artworks != null) { for (Artwork artwork : artworks) { ok = false; if (color == null || artwork.colors.get(color) > MuzeiMiyazakiApplication.getInstance().get(color)) ok = true; if (ok && frame != null) { if ("portrait".equals(frame)) { ok = artwork.ratio < 1.0f; } else if ("ultra_wide".equals(frame)) { ok = artwork.ratio > 3.0f; } else if ("wide".equals(frame)) { ok = artwork.ratio >= 1.0f && artwork.ratio <= 3.0f; } } if (ok) matches++; } 
if (BuildConfig.DEBUG) { ((TextView) findViewById(R.id.matches)).setText("Using " + matches + " artworks (" + MuzeiMiyazakiApplication.getInstance().getPercentWithCaption() + "%)"); } else { ((TextView) findViewById(R.id.matches)).setText("Using " + matches + " artworks"); } findViewById(R.id.black).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.maroon).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.navy).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.teal).setAlpha(ALPHA_DEACTIVATED); findViewById(R.id.green).setAlpha(ALPHA_DEACTIVATED); if ("black".equals(color)) findViewById(R.id.black).setAlpha(1.0f); if ("maroon".equals(color)) findViewById(R.id.maroon).setAlpha(1.0f); if ("navy".equals(color)) findViewById(R.id.navy).setAlpha(1.0f); if ("teal".equals(color)) findViewById(R.id.teal).setAlpha(1.0f); if ("green".equals(color)) findViewById(R.id.green).setAlpha(1.0f); } } public void onFrameLayout(View view) { final SharedPreferences settings = getApplicationContext().getSharedPreferences(CURRENT_PREF_NAME, Context.MODE_PRIVATE); String frame = settings.getString(MUZEI_FRAME, null); if (view.getId() == R.id.frame_ultra_wide) { if ("ultra_wide".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "ultra_wide").commit(); } else if (view.getId() == R.id.frame_portrait) { if ("portrait".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "portrait").commit(); } else if (view.getId() == R.id.frame_wide) { if ("wide".equals(frame)) settings.edit().remove(MUZEI_FRAME).commit(); else settings.edit().putString(MUZEI_FRAME, "wide").commit(); } Intent intent = new Intent(ACTION_RELOAD); intent.setClass(this, MuzeiMiyazakiService.class); startService(intent); updateMatches(settings); } }
remove link to suspended website
app/src/main/java/net/ebt/muzei/miyazaki/activity/MuzeiMiyazakiSettings.java
remove link to suspended website
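The only functional difference between the two versions above is one added line in onCreate(): the "see all" control, whose click handler opened http://muzeighibli.net, is now hidden so the suspended site can no longer be reached from the settings screen. Both lines below appear verbatim in the new_contents; the second is the one added by this commit.

```java
// Excerpt from the new onCreate(); the click listener registered right after these
// lines is left unchanged, it just becomes unreachable through the UI.
label.setVisibility(View.GONE);
findViewById(R.id.seeall).setVisibility(View.GONE);
```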
Java
bsd-3-clause
ba078255285ecec18c5b839a9144c0ec0bb2ba90
0
andronix3/SwingHacks
package com.smartg.swing; import java.awt.Cursor; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.Graphics; import java.awt.Insets; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.Dictionary; import java.util.Hashtable; import javax.swing.BoundedRangeModel; import javax.swing.DefaultBoundedRangeModel; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JSlider; import javax.swing.JToggleButton; import javax.swing.SwingConstants; import javax.swing.border.EmptyBorder; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.plaf.basic.BasicSliderUI; import com.smartg.java.util.EventListenerListIterator; /** * JRangeSlider. This class implements slider with two values. Second value is * equals to first value plus extent, so I just reused BoundedRangeModel. * JRangeSlider will look correct on all platforms (using appropriate SliderUI). * * @author andronix * */ public class JRangeSlider extends JPanel { // used to get access to protected goodies private final class RangeSliderUI extends BasicSliderUI { public RangeSliderUI(JSlider slider) { super(slider); } Rectangle getThumbRect() { calculateThumbLocation(); return new Rectangle(this.thumbRect); } } private final class MouseHandler extends MouseAdapter { private int cursorType; private int pressX, pressY; private int modelValue; private int modelExtent; @Override public void mouseMoved(MouseEvent e) { int x = e.getX(); int y = e.getY(); boolean horizontal = (slider.getOrientation() == SwingConstants.HORIZONTAL); if (extentThumbRect.contains(x,y)) { cursorType = horizontal ? Cursor.E_RESIZE_CURSOR : Cursor.N_RESIZE_CURSOR; } else if (thumbRect.contains(x,y)) { cursorType = horizontal ? 
Cursor.W_RESIZE_CURSOR : Cursor.S_RESIZE_CURSOR; } else if (middleRect.contains(x,y)) { cursorType = Cursor.MOVE_CURSOR; } else { cursorType = Cursor.DEFAULT_CURSOR; } setCursor(Cursor.getPredefinedCursor(cursorType)); } @Override public void mouseDragged(MouseEvent e) { float delta; switch (cursorType) { case Cursor.DEFAULT_CURSOR: break; case Cursor.MOVE_CURSOR: if (slider.getOrientation() == SwingConstants.HORIZONTAL) { delta = (pressX - e.getX()) * scaleX; model.setValue((int) (modelValue - delta)); } else { delta = -(pressY - e.getY()) * scaleY; model.setValue((int) (modelValue - delta)); } repaint(); break; case Cursor.E_RESIZE_CURSOR: delta = (pressX - e.getX()) * scaleX; int extent = (int) (modelExtent - delta); if (extent < 0) { setValue(modelValue + extent); model.setExtent(0); } else { model.setExtent(extent); } repaint(); break; case Cursor.W_RESIZE_CURSOR: delta = (pressX - e.getX()) * scaleX; if (delta > modelValue) { delta = modelValue; } setValue((int) (modelValue - delta)); repaint(); break; case Cursor.N_RESIZE_CURSOR: delta = -(pressY - e.getY()) * scaleY; extent = (int) (modelExtent - delta); if (extent < 0) { setValue(modelValue + extent); model.setExtent(0); } else { model.setExtent(extent); } repaint(); break; case Cursor.S_RESIZE_CURSOR: delta = -(pressY - e.getY()) * scaleY; if (delta > modelValue) { delta = modelValue; } setValue((int) (modelValue - delta)); repaint(); break; } } @Override public void mousePressed(MouseEvent e) { pressX = e.getX(); pressY = e.getY(); modelValue = model.getValue(); modelExtent = model.getExtent(); } } private static final long serialVersionUID = -4923076507643832793L; private BoundedRangeModel model = new DefaultBoundedRangeModel(); private MouseHandler mouseHandler = new MouseHandler(); private float scaleX, scaleY; private Rectangle thumbRect, middleRect, extentThumbRect; private JSlider slider = new JSlider(); public JRangeSlider() { this(0, 100, 0, 10); } public JRangeSlider(int min, int max, int value, int extent) { model.setMinimum(min); model.setMaximum(max); model.setValue(value); model.setExtent(extent); slider.setUI(new RangeSliderUI(slider)); slider.setMinimum(min); slider.setMaximum(max); addMouseListener(mouseHandler); addMouseMotionListener(mouseHandler); addComponentListener(new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { computeScaleX(); computeScaleY(); } }); setBorder(new EmptyBorder(1, 1, 1, 1)); model.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { fireChangeEvent(); repaint(); } }); } public int getValue() { return model.getValue(); } public void setValue(int i) { i = clamp(i); int v = model.getValue(); int e = model.getExtent(); model.setRangeProperties(i, v + e - i, model.getMinimum(), model.getMaximum(), false); } private int clamp(int i) { int max = model.getMaximum(); if (i > max) { i = max; } int min = model.getMinimum(); if (i < min) { i = min; } return i; } public int getSecondValue() { return model.getValue() + model.getExtent(); } public void setSecondValue(int i) { i = clamp(i); int v = model.getValue(); model.setExtent(i - v); } private void fireChangeEvent() { EventListenerListIterator<ChangeListener> iter = new EventListenerListIterator<ChangeListener>(ChangeListener.class, listenerList); ChangeEvent e = new ChangeEvent(this); while (iter.hasNext()) { ChangeListener next = iter.next(); next.stateChanged(e); } } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); slider.setBounds(getBounds()); 
slider.setValue(0); RangeSliderUI ui = (RangeSliderUI) slider.getUI(); if(getPaintTrack()) { ui.paintTrack(g); } slider.setValue(model.getValue() + model.getExtent()); extentThumbRect = ui.getThumbRect(); Rectangle clip = g.getClipBounds(); if (getOrientation() == SwingConstants.HORIZONTAL) { g.setClip((int) (model.getValue() / scaleX), 0, getWidth(), getHeight()); } slider.paint(g); g.setClip(clip.x, clip.y, clip.width, clip.height); if(getPaintLabels()) { ui.paintLabels(g); } if(getPaintTicks()) { ui.paintTicks(g); } slider.setValue(model.getValue()); thumbRect = ui.getThumbRect(); ui.paintThumb(g); switch (slider.getOrientation()) { case SwingConstants.HORIZONTAL: middleRect = new Rectangle(thumbRect); middleRect.width = extentThumbRect.x - thumbRect.x; break; case SwingConstants.VERTICAL: middleRect = new Rectangle(extentThumbRect); middleRect.height = thumbRect.y - extentThumbRect.y; break; } } private void computeScaleX() { float width = getWidth(); Insets ins = getInsets(); width -= ins.left + ins.right; int min = model.getMinimum(); int max = model.getMaximum(); float size = max - min; scaleX = size / width; } private void computeScaleY() { float height = getHeight(); Insets ins = getInsets(); height -= ins.top + ins.bottom; int min = model.getMinimum(); int max = model.getMaximum(); float size = max - min; scaleY = size / height; } // all following methods just forwarding calls to/from JSlider @SuppressWarnings("rawtypes") public Dictionary getLabelTable() { return slider.getLabelTable(); } @SuppressWarnings("rawtypes") public void setLabelTable(Dictionary labels) { slider.setLabelTable(labels); } public boolean getPaintLabels() { return slider.getPaintLabels(); } public void setPaintLabels(boolean b) { slider.setPaintLabels(b); } public boolean getPaintTrack() { return slider.getPaintTrack(); } public void setPaintTrack(boolean b) { slider.setPaintTrack(b); } public boolean getPaintTicks() { return slider.getPaintTicks(); } public void setPaintTicks(boolean b) { slider.setPaintTicks(b); } public boolean getSnapToTicks() { return slider.getSnapToTicks(); } public void setSnapToTicks(boolean b) { slider.setSnapToTicks(b); } public int getMinorTickSpacing() { return slider.getMinorTickSpacing(); } public void setMinorTickSpacing(int n) { slider.setMinorTickSpacing(n); } public int getMajorTickSpacing() { return slider.getMajorTickSpacing(); } public void setMajorTickSpacing(int n) { slider.setMajorTickSpacing(n); } public boolean getInverted() { return slider.getInverted(); } public void setInverted(boolean b) { slider.setInverted(b); } public void setFont(Font font) { if (slider != null) { slider.setFont(font); } } @SuppressWarnings("rawtypes") public Hashtable createStandardLabels(int increment, int start) { return slider.createStandardLabels(increment, start); } @SuppressWarnings("rawtypes") public Hashtable createStandardLabels(int increment) { return slider.createStandardLabels(increment); } @Override public Dimension getPreferredSize() { return slider.getPreferredSize(); } @Override public void setPreferredSize(Dimension preferredSize) { slider.setPreferredSize(preferredSize); } public int getOrientation() { return slider.getOrientation(); } public void setOrientation(int orientation) { slider.setOrientation(orientation); } public void addChangeListener(ChangeListener l) { listenerList.add(ChangeListener.class, l); } public void removeChangeListener(ChangeListener l) { listenerList.remove(ChangeListener.class, l); } public boolean getValueIsAdjusting() { return 
slider.getValueIsAdjusting(); } public void setValueIsAdjusting(boolean b) { slider.setValueIsAdjusting(b); } public int getMaximum() { return slider.getMaximum(); } public void setMaximum(int maximum) { model.setMaximum(maximum); slider.setMaximum(maximum); } public int getMinimum() { return slider.getMinimum(); } public void setMinimum(int minimum) { model.setMinimum(minimum); slider.setMinimum(minimum); } public BoundedRangeModel getModel() { return model; } public void setModel(BoundedRangeModel newModel) { this.model = newModel; slider.setMinimum(model.getMinimum()); slider.setMaximum(model.getMaximum()); } public ChangeListener[] getChangeListeners() { return listenerList.getListeners(ChangeListener.class); } public static void main(String... s) { JFrame frame = new JFrame(); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().setLayout(new FlowLayout()); final JRangeSlider jrs = new JRangeSlider(0, 100, 20, 30); jrs.setOrientation(SwingConstants.VERTICAL); final JToggleButton jtb = new JToggleButton("ChangeValue"); jtb.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (jtb.isSelected()) { jrs.setValue(30); } else { jrs.setValue(70); } } }); frame.getContentPane().add(jrs); frame.getContentPane().add(jtb); frame.getContentPane().add(new JSlider()); frame.pack(); frame.setVisible(true); } }
com/smartg/swing/JRangeSlider.java
package com.smartg.swing; import java.awt.Cursor; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Font; import java.awt.Graphics; import java.awt.Insets; import java.awt.Rectangle; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ComponentAdapter; import java.awt.event.ComponentEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.util.Dictionary; import java.util.Hashtable; import javax.swing.BoundedRangeModel; import javax.swing.DefaultBoundedRangeModel; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JSlider; import javax.swing.JToggleButton; import javax.swing.SwingConstants; import javax.swing.border.EmptyBorder; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.plaf.basic.BasicSliderUI; import com.smartg.java.util.EventListenerListIterator; /** * JRangeSlider. This class implements slider with two values. Second value is * equals to first value plus extent, so I just reused BoundedRangeModel. * JRangeSlider will look correct on all platforms (using appropriate SliderUI). * * @author andronix * */ public class JRangeSlider extends JPanel { // used to get access to protected goodies private final class RangeSliderUI extends BasicSliderUI { public RangeSliderUI(JSlider slider) { super(slider); } Rectangle getThumbRect() { calculateThumbLocation(); return new Rectangle(this.thumbRect); } } private final class MouseHandler extends MouseAdapter { private int cursorType; private int pressX, pressY; private int modelValue; private int modelExtent; @Override public void mouseMoved(MouseEvent e) { int x = e.getX(); int y = e.getY(); boolean horizontal = (slider.getOrientation() == SwingConstants.HORIZONTAL); if (upperThumbRect.contains(x,y)) { cursorType = horizontal ? Cursor.E_RESIZE_CURSOR : Cursor.N_RESIZE_CURSOR; } else if (lowerThumbRect.contains(x,y)) { cursorType = horizontal ? 
Cursor.W_RESIZE_CURSOR : Cursor.S_RESIZE_CURSOR; } else if (middleRect.contains(x,y)) { cursorType = Cursor.MOVE_CURSOR; } else { cursorType = Cursor.DEFAULT_CURSOR; } setCursor(Cursor.getPredefinedCursor(cursorType)); } @Override public void mouseDragged(MouseEvent e) { float delta; switch (cursorType) { case Cursor.DEFAULT_CURSOR: break; case Cursor.MOVE_CURSOR: if (slider.getOrientation() == SwingConstants.HORIZONTAL) { delta = (pressX - e.getX()) * scaleX; model.setValue((int) (modelValue - delta)); } else { delta = -(pressY - e.getY()) * scaleY; model.setValue((int) (modelValue - delta)); } repaint(); break; case Cursor.E_RESIZE_CURSOR: delta = (pressX - e.getX()) * scaleX; int extent = (int) (modelExtent - delta); if (extent < 0) { setValue(modelValue + extent); model.setExtent(0); } else { model.setExtent(extent); } repaint(); break; case Cursor.W_RESIZE_CURSOR: delta = (pressX - e.getX()) * scaleX; if (delta > modelValue) { delta = modelValue; } setValue((int) (modelValue - delta)); repaint(); break; case Cursor.N_RESIZE_CURSOR: delta = -(pressY - e.getY()) * scaleY; extent = (int) (modelExtent - delta); if (extent < 0) { setValue(modelValue + extent); model.setExtent(0); } else { model.setExtent(extent); } repaint(); break; case Cursor.S_RESIZE_CURSOR: delta = -(pressY - e.getY()) * scaleY; if (delta > modelValue) { delta = modelValue; } setValue((int) (modelValue - delta)); repaint(); break; } } @Override public void mousePressed(MouseEvent e) { pressX = e.getX(); pressY = e.getY(); modelValue = model.getValue(); modelExtent = model.getExtent(); } } private static final long serialVersionUID = -4923076507643832793L; private BoundedRangeModel model = new DefaultBoundedRangeModel(); private MouseHandler mouseHandler = new MouseHandler(); private float scaleX, scaleY; private Rectangle lowerThumbRect, middleRect, upperThumbRect; private JSlider slider = new JSlider(); public JRangeSlider() { this(0, 100, 0, 10); } public JRangeSlider(int min, int max, int value, int extent) { model.setMinimum(min); model.setMaximum(max); model.setValue(value); model.setExtent(extent); slider.setUI(new RangeSliderUI(slider)); slider.setMinimum(min); slider.setMaximum(max); addMouseListener(mouseHandler); addMouseMotionListener(mouseHandler); addComponentListener(new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { computeScaleX(); computeScaleY(); } }); setBorder(new EmptyBorder(1, 1, 1, 1)); model.addChangeListener(new ChangeListener() { public void stateChanged(ChangeEvent e) { fireChangeEvent(); repaint(); } }); } public int getValue() { return model.getValue(); } public void setValue(int i) { i = clamp(i); int v = model.getValue(); int e = model.getExtent(); model.setRangeProperties(i, v + e - i, model.getMinimum(), model.getMaximum(), false); } private int clamp(int i) { int max = model.getMaximum(); if (i > max) { i = max; } int min = model.getMinimum(); if (i < min) { i = min; } return i; } public int getSecondValue() { return model.getValue() + model.getExtent(); } public void setSecondValue(int i) { i = clamp(i); int v = model.getValue(); model.setExtent(i - v); } private void fireChangeEvent() { EventListenerListIterator<ChangeListener> iter = new EventListenerListIterator<ChangeListener>(ChangeListener.class, listenerList); ChangeEvent e = new ChangeEvent(this); while (iter.hasNext()) { ChangeListener next = iter.next(); next.stateChanged(e); } } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); slider.setBounds(getBounds()); 
slider.setValue(0); RangeSliderUI ui = (RangeSliderUI) slider.getUI(); if(getPaintTrack()) { ui.paintTrack(g); } slider.setValue(model.getValue() + model.getExtent()); upperThumbRect = ui.getThumbRect(); Rectangle clip = g.getClipBounds(); if (getOrientation() == SwingConstants.HORIZONTAL) { g.setClip((int) (model.getValue() / scaleX), 0, getWidth(), getHeight()); } slider.paint(g); g.setClip(clip.x, clip.y, clip.width, clip.height); if(getPaintLabels()) { ui.paintLabels(g); } if(getPaintTicks()) { ui.paintTicks(g); } slider.setValue(model.getValue()); lowerThumbRect = ui.getThumbRect(); ui.paintThumb(g); middleRect = new Rectangle(lowerThumbRect); switch (slider.getOrientation()) { case SwingConstants.HORIZONTAL: middleRect.width = upperThumbRect.x - lowerThumbRect.x; break; case SwingConstants.VERTICAL: middleRect.height = upperThumbRect.y - lowerThumbRect.y; break; } } private void computeScaleX() { float width = getWidth(); Insets ins = getInsets(); width -= ins.left + ins.right; int min = model.getMinimum(); int max = model.getMaximum(); float size = max - min; scaleX = size / width; } private void computeScaleY() { float height = getHeight(); Insets ins = getInsets(); height -= ins.top + ins.bottom; int min = model.getMinimum(); int max = model.getMaximum(); float size = max - min; scaleY = size / height; } // all following methods just forwarding calls to/from JSlider @SuppressWarnings("rawtypes") public Dictionary getLabelTable() { return slider.getLabelTable(); } @SuppressWarnings("rawtypes") public void setLabelTable(Dictionary labels) { slider.setLabelTable(labels); } public boolean getPaintLabels() { return slider.getPaintLabels(); } public void setPaintLabels(boolean b) { slider.setPaintLabels(b); } public boolean getPaintTrack() { return slider.getPaintTrack(); } public void setPaintTrack(boolean b) { slider.setPaintTrack(b); } public boolean getPaintTicks() { return slider.getPaintTicks(); } public void setPaintTicks(boolean b) { slider.setPaintTicks(b); } public boolean getSnapToTicks() { return slider.getSnapToTicks(); } public void setSnapToTicks(boolean b) { slider.setSnapToTicks(b); } public int getMinorTickSpacing() { return slider.getMinorTickSpacing(); } public void setMinorTickSpacing(int n) { slider.setMinorTickSpacing(n); } public int getMajorTickSpacing() { return slider.getMajorTickSpacing(); } public void setMajorTickSpacing(int n) { slider.setMajorTickSpacing(n); } public boolean getInverted() { return slider.getInverted(); } public void setInverted(boolean b) { slider.setInverted(b); } public void setFont(Font font) { if (slider != null) { slider.setFont(font); } } @SuppressWarnings("rawtypes") public Hashtable createStandardLabels(int increment, int start) { return slider.createStandardLabels(increment, start); } @SuppressWarnings("rawtypes") public Hashtable createStandardLabels(int increment) { return slider.createStandardLabels(increment); } @Override public Dimension getPreferredSize() { return slider.getPreferredSize(); } @Override public void setPreferredSize(Dimension preferredSize) { slider.setPreferredSize(preferredSize); } public int getOrientation() { return slider.getOrientation(); } public void setOrientation(int orientation) { slider.setOrientation(orientation); } public void addChangeListener(ChangeListener l) { listenerList.add(ChangeListener.class, l); } public void removeChangeListener(ChangeListener l) { listenerList.remove(ChangeListener.class, l); } public boolean getValueIsAdjusting() { return slider.getValueIsAdjusting(); } public void 
setValueIsAdjusting(boolean b) { slider.setValueIsAdjusting(b); } public int getMaximum() { return slider.getMaximum(); } public void setMaximum(int maximum) { model.setMaximum(maximum); slider.setMaximum(maximum); } public int getMinimum() { return slider.getMinimum(); } public void setMinimum(int minimum) { model.setMinimum(minimum); slider.setMinimum(minimum); } public BoundedRangeModel getModel() { return model; } public void setModel(BoundedRangeModel newModel) { this.model = newModel; slider.setMinimum(model.getMinimum()); slider.setMaximum(model.getMaximum()); } public ChangeListener[] getChangeListeners() { return listenerList.getListeners(ChangeListener.class); } public static void main(String... s) { JFrame frame = new JFrame(); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.getContentPane().setLayout(new FlowLayout()); final JRangeSlider jrs = new JRangeSlider(0, 100, 20, 30); jrs.setOrientation(SwingConstants.VERTICAL); final JToggleButton jtb = new JToggleButton("ChangeValue"); jtb.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (jtb.isSelected()) { jrs.setValue(30); } else { jrs.setValue(70); } } }); frame.getContentPane().add(jrs); frame.getContentPane().add(jtb); frame.getContentPane().add(new JSlider()); frame.pack(); frame.setVisible(true); } }
Rename Thumb Rectangles
com/smartg/swing/JRangeSlider.java
Rename Thumb Rectangles
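The JRangeSlider record above paints two thumbs that share one BoundedRangeModel (value marks the lower thumb, value + extent the upper). Below is a minimal usage sketch, assuming the com.smartg.swing.JRangeSlider class from this record is on the classpath; the listener is registered on the shared model returned by getModel(), since the firing behaviour of the component's own addChangeListener is not visible in the record, and the window layout is only illustrative.

import java.awt.FlowLayout;

import javax.swing.BoundedRangeModel;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;

import com.smartg.swing.JRangeSlider;

public class RangeSliderDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JFrame frame = new JFrame("JRangeSlider demo");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.getContentPane().setLayout(new FlowLayout());

            // min, max, lower value, extent (upper thumb = value + extent), as in the record's main()
            JRangeSlider slider = new JRangeSlider(0, 100, 20, 30);
            JLabel label = new JLabel("range: 20 - 50");

            // read the selected range back from the shared BoundedRangeModel
            BoundedRangeModel model = slider.getModel();
            model.addChangeListener(e ->
                label.setText("range: " + model.getValue() + " - " + (model.getValue() + model.getExtent())));

            frame.getContentPane().add(slider);
            frame.getContentPane().add(label);
            frame.pack();
            frame.setVisible(true);
        });
    }
}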
Java
mit
d3feab967570f4f05244ecf74d177b752502934a
0
student-capture/student-capture
src/test/java/studentcapture/usersettings/UserSettingsResourceTest.java
package studentcapture.usersettings; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.mock.web.MockMultipartFile; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.util.FileCopyUtils; import org.springframework.util.LinkedMultiValueMap; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.client.RestTemplate; import org.springframework.web.context.WebApplicationContext; import studentcapture.config.StudentCaptureApplication; import studentcapture.config.StudentCaptureApplicationTests; import java.io.File; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.fileUpload; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Created by andreassavva on 2016-05-12. */ public class UserSettingsResourceTest extends StudentCaptureApplicationTests { @Autowired private WebApplicationContext webApplicationContext; @Autowired private RestTemplate templateMock; private MockMvc mockMvc; @Test public void testCorrectSettingsInput() throws Exception { if (mockMvc == null) { System.err.println("NULL"); } ResponseEntity<String> response = new ResponseEntity<String>(HttpStatus.OK); when( templateMock.postForEntity (any(String.class), any(LinkedMultiValueMap.class), eq(String.class))) .thenReturn(response); mockMvc.perform(post("/settings") .param("userID", "user") .param("language", "english") .param("emailAddress", "[email protected]") .param("textSize", "12") .param("newUser", "true")) .andExpect(status().isOk()); } @Before public void setup() { mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); Mockito.reset(templateMock); } }
removed failing test
src/test/java/studentcapture/usersettings/UserSettingsResourceTest.java
removed failing test
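The removed test above drives the /settings endpoint through a full web application context plus a mocked RestTemplate, which is what made it fragile. A minimal sketch of the same MockMvc POST check using standaloneSetup, which needs no Spring context at all; the SettingsController below is a hypothetical stand-in written for this sketch, not the project's real resource class, and only two of the original request parameters are shown.

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import org.junit.Before;
import org.junit.Test;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

public class SettingsEndpointSketchTest {

    // Hypothetical stand-in controller; the project's real class is not shown in the record.
    @RestController
    static class SettingsController {
        @RequestMapping(value = "/settings", method = RequestMethod.POST)
        public String save(@RequestParam String userID, @RequestParam String language) {
            return "saved " + userID + "/" + language;
        }
    }

    private MockMvc mockMvc;

    @Before
    public void setUp() {
        // standaloneSetup avoids loading the full application context the removed test depended on
        mockMvc = MockMvcBuilders.standaloneSetup(new SettingsController()).build();
    }

    @Test
    public void postSettingsReturnsOk() throws Exception {
        mockMvc.perform(post("/settings")
                .param("userID", "user")
                .param("language", "english"))
                .andExpect(status().isOk());
    }
}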
Java
apache-2.0
168d465452b88f7ba9fb44881fdb2f19c012ec4d
0
Frank-Wu/stratosphere-streaming,Frank-Wu/stratosphere-streaming,Frank-Wu/stratosphere-streaming
src/test/java/eu/stratosphere/streaming/api/streamrecord/TupleTest.java
package eu.stratosphere.streaming.api.streamrecord; import static org.junit.Assert.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import org.junit.Test; import eu.stratosphere.api.java.tuple.Tuple; import eu.stratosphere.api.java.tuple.Tuple2; import eu.stratosphere.api.java.typeutils.TupleTypeInfo; import eu.stratosphere.api.java.typeutils.TypeInformation; import eu.stratosphere.api.java.typeutils.runtime.TupleSerializer; import eu.stratosphere.pact.runtime.plugable.DeserializationDelegate; import eu.stratosphere.pact.runtime.plugable.SerializationDelegate; import eu.stratosphere.types.StringValue; public class TupleTest { public Tuple readTuple(DataInput in) throws IOException { StringValue typeVal = new StringValue(); typeVal.read(in); // TODO: use Tokenizer String[] types = typeVal.getValue().split(","); Class[] basicTypes = new Class[types.length]; for (int i = 0; i < types.length; i++) { try { basicTypes[i] = Class.forName(types[i]); } catch (ClassNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } TypeInformation<? extends Tuple> typeInfo = TupleTypeInfo .getBasicTupleTypeInfo(basicTypes); TupleSerializer<Tuple> tupleSerializer = (TupleSerializer<Tuple>) typeInfo .createSerializer(); DeserializationDelegate<Tuple> dd = new DeserializationDelegate<Tuple>( tupleSerializer); dd.setInstance(tupleSerializer.createInstance()); dd.read(in); return dd.getInstance(); } private void writeTuple(Tuple tuple, DataOutput out) { Class[] basicTypes = new Class[tuple.getArity()]; StringBuilder basicTypeNames = new StringBuilder(); for (int i = 0; i < basicTypes.length; i++) { basicTypes[i] = tuple.getField(i).getClass(); basicTypeNames.append(basicTypes[i].getName() + ","); } TypeInformation<? extends Tuple> typeInfo = TupleTypeInfo .getBasicTupleTypeInfo(basicTypes); StringValue typeVal = new StringValue(basicTypeNames.toString()); @SuppressWarnings("unchecked") TupleSerializer<Tuple> tupleSerializer = (TupleSerializer<Tuple>) typeInfo .createSerializer(); SerializationDelegate<Tuple> serializationDelegate = new SerializationDelegate<Tuple>( tupleSerializer); serializationDelegate.setInstance(tuple); try { typeVal.write(out); serializationDelegate.write(out); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Test public void test() { ByteArrayOutputStream buff = new ByteArrayOutputStream(); DataOutputStream out = new DataOutputStream(buff); int num = 42; String str = "above clouds"; Tuple2<Integer, String> tuple = new Tuple2<Integer, String>(num, str); try { writeTuple(tuple, out); DataInputStream in = new DataInputStream(new ByteArrayInputStream( buff.toByteArray())); Tuple2<Integer, String> tupleOut = (Tuple2<Integer, String>) readTuple( in); assertEquals(tupleOut.getField(0), 42); } catch (IOException e) { fail(); e.printStackTrace(); } } }
TupleTest removed
src/test/java/eu/stratosphere/streaming/api/streamrecord/TupleTest.java
TupleTest removed
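The removed TupleTest serialises a tuple as a comma-separated type header followed by the payload and then reads it back. Below is a dependency-free sketch of the same round-trip idea using only java.io, so it does not rely on the Stratosphere serializer APIs shown above; the (int, String) pair and the header format are illustrative simplifications.

import static org.junit.Assert.assertEquals;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.junit.Test;

public class PairRoundTripTest {

    // Write an (int, String) pair the way the removed test wrote tuples: a small type header, then the payload.
    private void writePair(int number, String text, DataOutputStream out) throws IOException {
        out.writeUTF("java.lang.Integer,java.lang.String"); // field type names, comma separated
        out.writeInt(number);
        out.writeUTF(text);
    }

    @Test
    public void roundTrip() throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        writePair(42, "above clouds", new DataOutputStream(buffer));

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        String[] types = in.readUTF().split(",");
        assertEquals("java.lang.Integer", types[0]);
        assertEquals(42, in.readInt());
        assertEquals("above clouds", in.readUTF());
    }
}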
Java
mit
9ed1d50de0d3d4f95c580dae9b5aa7a618b4e6c0
0
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.action.builtin; import com.elmakers.mine.bukkit.action.CompoundEntityAction; import com.elmakers.mine.bukkit.api.action.CastContext; import com.elmakers.mine.bukkit.api.spell.Spell; import com.elmakers.mine.bukkit.api.spell.TargetType; import com.elmakers.mine.bukkit.utility.Targeting; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import java.lang.ref.WeakReference; import java.util.List; public class ConeOfEffectAction extends CompoundEntityAction { private int targetCount; private double range; private Targeting targeting; @Override public void initialize(Spell spell, ConfigurationSection baseParameters) { super.initialize(spell, baseParameters); targeting = new Targeting(); } @Override public void reset(CastContext context) { super.reset(context); createActionContext(context); targeting.start(context.getEyeLocation()); } @Override public void prepare(CastContext context, ConfigurationSection parameters) { super.prepare(context, parameters); targetCount = parameters.getInt("target_count", -1); range = parameters.getDouble("range", 16); range = range * context.getMage().getRangeMultiplier(); targeting.processParameters(parameters); // Some parameter tweaks to make sure things are sane TargetType targetType = targeting.getTargetType(); if (targetType == TargetType.NONE || targetType == TargetType.BLOCK) { targeting.setTargetType(TargetType.OTHER); } else if (targetType == TargetType.SELF) { targeting.setTargetType(TargetType.ANY); } // COE never uses hitbox, there's the Retarget action for that. targeting.setUseHitbox(false); } @Override public void addEntities(CastContext context, List<WeakReference<Entity>> entities) { targeting.getTargetEntities(context, range, targetCount, entities); } }
src/main/java/com/elmakers/mine/bukkit/action/builtin/ConeOfEffectAction.java
package com.elmakers.mine.bukkit.action.builtin; import com.elmakers.mine.bukkit.action.CompoundEntityAction; import com.elmakers.mine.bukkit.api.action.CastContext; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import java.lang.ref.WeakReference; import java.util.List; public class ConeOfEffectAction extends CompoundEntityAction { private int targetCount; @Override public void reset(CastContext context) { super.reset(context); createActionContext(context); } @Override public void prepare(CastContext context, ConfigurationSection parameters) { super.prepare(context, parameters); targetCount = parameters.getInt("target_count", -1); } @Override public void addEntities(CastContext context, List<WeakReference<Entity>> entities) { context.getTargetEntities(targetCount, entities); } }
ConeOfEffect uses Targeting class
src/main/java/com/elmakers/mine/bukkit/action/builtin/ConeOfEffectAction.java
ConeOfEffect uses Targeting class
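The new ConeOfEffectAction delegates entity selection to a Targeting helper with a range and target count. A self-contained sketch of the cone test such a helper typically performs (a range check plus an angle check via a dot product); the Vec3 type below is a stand-in for Bukkit locations, and the 30-degree cone and 16-block range are example values, not figures taken from the plugin.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ConeFilterSketch {

    // Minimal stand-in for a 3D position; the real action works on Bukkit entities and locations.
    static final class Vec3 {
        final double x, y, z;
        Vec3(double x, double y, double z) { this.x = x; this.y = y; this.z = z; }
        Vec3 minus(Vec3 o) { return new Vec3(x - o.x, y - o.y, z - o.z); }
        double length() { return Math.sqrt(x * x + y * y + z * z); }
        double dot(Vec3 o) { return x * o.x + y * o.y + z * o.z; }
    }

    /** Keep candidates within range of eye and within coneDegrees of the view direction. */
    static List<Vec3> filterCone(Vec3 eye, Vec3 direction, double range, double coneDegrees, List<Vec3> candidates) {
        double cosLimit = Math.cos(Math.toRadians(coneDegrees));
        double dirLen = direction.length();
        List<Vec3> hits = new ArrayList<>();
        for (Vec3 candidate : candidates) {
            Vec3 offset = candidate.minus(eye);
            double distance = offset.length();
            if (distance == 0 || distance > range) {
                continue; // out of range (or the caster itself)
            }
            double cosAngle = offset.dot(direction) / (distance * dirLen);
            if (cosAngle >= cosLimit) {
                hits.add(candidate); // inside the cone
            }
        }
        return hits;
    }

    public static void main(String[] args) {
        Vec3 eye = new Vec3(0, 0, 0);
        Vec3 facing = new Vec3(1, 0, 0);
        List<Vec3> candidates = Arrays.asList(new Vec3(5, 1, 0), new Vec3(-3, 0, 0), new Vec3(30, 0, 0));
        System.out.println(filterCone(eye, facing, 16, 30, candidates).size()); // prints 1
    }
}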
Java
mit
81451f1e750d088105f35e5d1d0e4985f2ce3c7f
0
FASAM-ES/projeto-zeus
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package br.com.fasam.projetoexemplo.entidades; import java.util.ArrayList; import java.util.List; /** * * @author Aluno */ public class Artigo { String titulo; String descricao; Usuario usuario; List<Comentario> comentarios; List<Tag> tags; public Usuario getUsuario() { return usuario; } public void setUsuario(Usuario usuario) { this.usuario = usuario; } public Comentario getComentario(Integer i){ return comentarios.get(i); } public void addComentario(Comentario comentario){ if(this.comentarios == null){ this.comentarios = new ArrayList<Comentario>(); } this.comentarios.add(comentario); } public void remComentario(Comentario comentario){ if(this.comentarios != null){ this.comentarios.remove(comentario); } } public Tag getTag(Integer i){ return tags.get(i); } public void addTag(Tag tag){ if(this.tags == null){ this.tags = new ArrayList<Tag>(); } this.tags.add(tag); } public void remTag(Tag tag){ if(this.tags != null){ this.tags.remove(tag); } } public String getTitulo() { return titulo; } public void setTitulo(String titulo) { this.titulo = titulo; } public String getDescricao() { return descricao; } public void setDescricao(String descricao) { this.descricao = descricao; } }
ProjetoExemplo/src/main/java/br/com/fasam/projetoexemplo/entidades/Artigo.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package br.com.fasam.projetoexemplo.entidades; /** * * @author Aluno */ public class Artigo { String titulo; String descrição; public String getTitulo() { return titulo; } public void setTitulo(String titulo) { this.titulo = titulo; } public String getDescrição() { return descrição; } public void setDescrição(String descrição) { this.descrição = descrição; } }
Adding relationships to the Artigo class
ProjetoExemplo/src/main/java/br/com/fasam/projetoexemplo/entidades/Artigo.java
Adding relationships to the Artigo class
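The commit above adds comment and tag relations to Artigo with lazily created lists. Below is a minimal sketch of that lazy-initialised add/remove pattern in isolation, using a hypothetical Post/comment pair of names; it shows the add call after the lazy initialisation and the non-null guard the remove helper needs.

import java.util.ArrayList;
import java.util.List;

/** Minimal sketch of the lazy-initialised add/remove pattern used for one-to-many relations. */
public class Post {
    private List<String> comments;

    public void addComment(String comment) {
        if (comments == null) {
            comments = new ArrayList<>();   // create the list on first use
        }
        comments.add(comment);              // then actually store the element
    }

    public void removeComment(String comment) {
        if (comments != null) {             // only touch the list if it already exists
            comments.remove(comment);
        }
    }

    public int commentCount() {
        return comments == null ? 0 : comments.size();
    }

    public static void main(String[] args) {
        Post post = new Post();
        post.addComment("first!");
        post.removeComment("first!");
        System.out.println(post.commentCount()); // prints 0
    }
}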
Java
mit
b14a654507e96bc3e2c1a55d600c76fb1a360122
0
venkatramanm/swf-all,venkatramanm/swf-all,venkatramanm/swf-all
package com.venky.swf.plugins.collab.db.model.participants.admin; import com.venky.swf.db.model.reflection.ModelReflector; import com.venky.swf.db.table.ModelImpl; import com.venky.swf.plugins.collab.db.model.user.User; import com.venky.swf.plugins.security.db.model.Role; import com.venky.swf.plugins.security.db.model.UserRole; import com.venky.swf.pm.DataSecurityFilter; import com.venky.swf.sql.Conjunction; import com.venky.swf.sql.Expression; import com.venky.swf.sql.Operator; import com.venky.swf.sql.Select; import java.util.List; import java.util.Set; import java.util.stream.Collectors; public class CompanyImpl extends ModelImpl<Company>{ public CompanyImpl(Company proxy) { super(proxy); } public Company getSelfCompany(){ return getProxy(); } public List<Long> getStaffUserIds(){ ModelReflector<Role> roleRef = ModelReflector.instance(Role.class); List<Role> staffRoles = new Select().from(Role.class).where(new Expression(roleRef.getPool(),Conjunction.OR). add(new Expression(roleRef.getPool(),"NAME",Operator.EQ,"STAFF")). add(new Expression(roleRef.getPool(),"STAFF",Operator.EQ,true))).execute(); ModelReflector<UserRole> userRoleModelReflector = ModelReflector.instance(UserRole.class); List<UserRole> staffUsers = new Select().from(UserRole.class).where( new Expression(userRoleModelReflector.getPool(),"ROLE_ID",Operator.IN, DataSecurityFilter.getIds(staffRoles).toArray()) ).execute(); Set<Long> userIds = staffUsers.stream().map(su->su.getUserId()).collect(Collectors.toSet()); Company company = getProxy(); ModelReflector<User> ref = ModelReflector.instance(User.class); Expression where = new Expression(ref.getPool(),Conjunction.AND); where.add(new Expression(ref.getPool(), "COMPANY_ID", Operator.EQ, company.getId())); where.add(new Expression(ref.getPool(),"ID",Operator.IN,userIds.toArray())); Select select = new Select("ID").from(User.class).where(where); /* select.add(" and exists (select 1 from user_roles , roles where user_roles.user_id = users.id " + " and roles.id = user_roles.role_id " + " and ( roles.name = 'STAFF' or roles.staff = true ) )" ); */ List<User> users = select.execute(); return DataSecurityFilter.getIds(users); } public Company getCustomer(){ return getProxy(); } public Company getVendor(){ return getProxy(); } }
swf-plugin-collab/src/main/java/com/venky/swf/plugins/collab/db/model/participants/admin/CompanyImpl.java
package com.venky.swf.plugins.collab.db.model.participants.admin; import com.venky.swf.db.model.reflection.ModelReflector; import com.venky.swf.db.table.ModelImpl; import com.venky.swf.plugins.collab.db.model.user.User; import com.venky.swf.plugins.security.db.model.Role; import com.venky.swf.plugins.security.db.model.UserRole; import com.venky.swf.pm.DataSecurityFilter; import com.venky.swf.sql.Conjunction; import com.venky.swf.sql.Expression; import com.venky.swf.sql.Operator; import com.venky.swf.sql.Select; import java.util.List; import java.util.Set; import java.util.stream.Collectors; public class CompanyImpl extends ModelImpl<Company>{ public CompanyImpl(Company proxy) { super(proxy); } public Company getSelfCompany(){ return getProxy(); } public List<Long> getStaffUserIds(){ ModelReflector<Role> roleRef = ModelReflector.instance(Role.class); List<Role> staffRoles = new Select().from(Role.class).where(new Expression(roleRef.getPool(),Conjunction.OR). add(new Expression(roleRef.getPool(),"NAME",Operator.EQ,"STAFF")). add(new Expression(roleRef.getPool(),"STAFF",Operator.EQ,true))).execute(); ModelReflector<UserRole> userRoleModelReflector = ModelReflector.instance(UserRole.class); List<UserRole> staffUsers = new Select().from(UserRole.class).where( new Expression(userRoleModelReflector.getPool(),"ROLE_ID",Operator.IN, DataSecurityFilter.getIds(staffRoles).toArray()) ).execute(); Set<Long> userIds = staffUsers.stream().map(su->su.getUserId()).collect(Collectors.toSet()); Company company = getProxy(); ModelReflector<User> ref = ModelReflector.instance(User.class); Expression where = new Expression(ref.getPool(),Conjunction.AND); where.add(new Expression(ref.getPool(), "COMPANY_ID", Operator.EQ, company.getId())); where.add(new Expression(ref.getPool(),"USER_ID",Operator.IN,userIds.toArray())); Select select = new Select("ID").from(User.class).where(where); /* select.add(" and exists (select 1 from user_roles , roles where user_roles.user_id = users.id " + " and roles.id = user_roles.role_id " + " and ( roles.name = 'STAFF' or roles.staff = true ) )" ); */ List<User> users = select.execute(); return DataSecurityFilter.getIds(users); } public Company getCustomer(){ return getProxy(); } public Company getVendor(){ return getProxy(); } }
Participant Extension clean up
swf-plugin-collab/src/main/java/com/venky/swf/plugins/collab/db/model/participants/admin/CompanyImpl.java
Participant Extension clean up
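The clean-up above restricts the staff-user query by the users table's ID column instead of USER_ID. Below is a plain-JDBC sketch of the query that change is aiming for; the table and column names are inferred from the reflector calls and the commented-out SQL in the record, so treat them as assumptions rather than the project's actual schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class StaffUserQuerySketch {

    // Assumed table/column names: users(id, company_id), user_roles(user_id, role_id), roles(id, name, staff).
    static final String STAFF_USERS_SQL =
        "SELECT u.id FROM users u WHERE u.company_id = ? AND u.id IN ("
      + "  SELECT ur.user_id FROM user_roles ur JOIN roles r ON r.id = ur.role_id"
      + "  WHERE r.name = 'STAFF' OR r.staff = true)";

    static List<Long> staffUserIds(Connection connection, long companyId) throws SQLException {
        List<Long> ids = new ArrayList<Long>();
        try (PreparedStatement statement = connection.prepareStatement(STAFF_USERS_SQL)) {
            statement.setLong(1, companyId);
            try (ResultSet resultSet = statement.executeQuery()) {
                while (resultSet.next()) {
                    ids.add(resultSet.getLong(1)); // collect matching user ids
                }
            }
        }
        return ids;
    }

    public static void main(String[] args) {
        System.out.println(STAFF_USERS_SQL); // wire in a real Connection to execute it
    }
}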
Java
epl-1.0
7e37423780bd37ff1cce6a61d1a23335a6554c1a
0
OndraZizka/windup,Ladicek/windup,d-s/windup,OndraZizka/windup,Maarc/windup,johnsteele/windup,jsight/windup,windup/windup,johnsteele/windup,mareknovotny/windup,windup/windup,d-s/windup,Maarc/windup,windup/windup,d-s/windup,OndraZizka/windup,Ladicek/windup,Ladicek/windup,Maarc/windup,johnsteele/windup,jsight/windup,mareknovotny/windup,windup/windup,johnsteele/windup,OndraZizka/windup,mareknovotny/windup,mareknovotny/windup,Maarc/windup,Ladicek/windup,d-s/windup,jsight/windup,jsight/windup
package org.jboss.windup.reporting.rules; import org.jboss.forge.furnace.util.Iterators; import org.jboss.windup.config.AbstractRuleProvider; import org.jboss.windup.config.GraphRewrite; import org.jboss.windup.config.metadata.MetadataBuilder; import org.jboss.windup.config.operation.iteration.AbstractIterationOperation; import org.jboss.windup.config.phase.PreReportGenerationPhase; import org.jboss.windup.config.query.Query; import org.jboss.windup.graph.GraphContext; import org.jboss.windup.graph.model.ProjectModel; import org.jboss.windup.graph.model.WindupConfigurationModel; import org.jboss.windup.graph.model.resource.FileModel; import org.jboss.windup.reporting.model.ApplicationReportIndexModel; import org.jboss.windup.reporting.service.ApplicationReportIndexService; import org.jboss.windup.util.exception.WindupException; import org.ocpsoft.rewrite.config.ConditionBuilder; import org.ocpsoft.rewrite.config.Configuration; import org.ocpsoft.rewrite.config.ConfigurationBuilder; import org.ocpsoft.rewrite.context.EvaluationContext; /** * For each input path, creates an index that can be used to register reports related to each application on that input. * * @author <a href="mailto:[email protected]">Jesse Sightler</a> */ public class CreateApplicationReportIndexRuleProvider extends AbstractRuleProvider { public CreateApplicationReportIndexRuleProvider() { super(MetadataBuilder.forProvider(CreateApplicationReportIndexRuleProvider.class) .setPhase(PreReportGenerationPhase.class)); } @Override public Configuration getConfiguration(GraphContext context) { ConditionBuilder applicationsFound = Query.fromType(WindupConfigurationModel.class); AbstractIterationOperation<WindupConfigurationModel> addApplicationReportIndex = new AbstractIterationOperation<WindupConfigurationModel>() { @Override public void perform(GraphRewrite event, EvaluationContext context, WindupConfigurationModel payload) { for (FileModel inputPath : payload.getInputPaths()) { ProjectModel projectModel = inputPath.getProjectModel(); if (projectModel == null) { throw new WindupException("Error, no project found in: " + inputPath.getFilePath()); } createApplicationReportIndex(event.getGraphContext(), projectModel); } } @Override public String toString() { return "AddApplicationReportIndex"; } }; return ConfigurationBuilder.begin() .addRule() .when(applicationsFound) .perform(addApplicationReportIndex); } /** * Create the index and associate it with all project models in the Application */ private ApplicationReportIndexModel createApplicationReportIndex(GraphContext context, ProjectModel applicationProjectModel) { ApplicationReportIndexService applicationReportIndexService = new ApplicationReportIndexService(context); ApplicationReportIndexModel index = applicationReportIndexService.create(); addAllProjectModels(index, applicationProjectModel); return index; } /** * Attach all project models within the application to the index. This will make it easy to navigate from the * projectModel to the application index. */ private void addAllProjectModels(ApplicationReportIndexModel navIdx, ProjectModel projectModel) { navIdx.addProjectModel(projectModel); for (ProjectModel childProject : projectModel.getChildProjects()) { if (!Iterators.asSet(navIdx.getProjectModels()).contains(childProject)) addAllProjectModels(navIdx, childProject); } } }
reporting/api/src/main/java/org/jboss/windup/reporting/rules/CreateApplicationReportIndexRuleProvider.java
package org.jboss.windup.reporting.rules; import org.jboss.forge.furnace.util.Iterators; import org.jboss.windup.config.AbstractRuleProvider; import org.jboss.windup.config.GraphRewrite; import org.jboss.windup.config.metadata.MetadataBuilder; import org.jboss.windup.config.operation.iteration.AbstractIterationOperation; import org.jboss.windup.config.phase.PreReportGenerationPhase; import org.jboss.windup.config.query.Query; import org.jboss.windup.graph.GraphContext; import org.jboss.windup.graph.model.ProjectModel; import org.jboss.windup.graph.model.WindupConfigurationModel; import org.jboss.windup.graph.model.resource.FileModel; import org.jboss.windup.reporting.model.ApplicationReportIndexModel; import org.jboss.windup.reporting.service.ApplicationReportIndexService; import org.jboss.windup.util.exception.WindupException; import org.ocpsoft.rewrite.config.ConditionBuilder; import org.ocpsoft.rewrite.config.Configuration; import org.ocpsoft.rewrite.config.ConfigurationBuilder; import org.ocpsoft.rewrite.context.EvaluationContext; /** * Creates an index that can be used to register reports related to an application. * * @author <a href="mailto:[email protected]">Jesse Sightler</a> * */ public class CreateApplicationReportIndexRuleProvider extends AbstractRuleProvider { public CreateApplicationReportIndexRuleProvider() { super(MetadataBuilder.forProvider(CreateApplicationReportIndexRuleProvider.class) .setPhase(PreReportGenerationPhase.class)); } @Override public Configuration getConfiguration(GraphContext context) { ConditionBuilder applicationsFound = Query.fromType(WindupConfigurationModel.class); AbstractIterationOperation<WindupConfigurationModel> addApplicationReportIndex = new AbstractIterationOperation<WindupConfigurationModel>() { @Override public void perform(GraphRewrite event, EvaluationContext context, WindupConfigurationModel payload) { for (FileModel inputPath : payload.getInputPaths()) { ProjectModel projectModel = inputPath.getProjectModel(); if (projectModel == null) { throw new WindupException("Error, no project found in: " + inputPath.getFilePath()); } createApplicationReportIndex(event.getGraphContext(), projectModel); } } @Override public String toString() { return "AddApplicationReportIndex"; } }; return ConfigurationBuilder.begin() .addRule() .when(applicationsFound) .perform(addApplicationReportIndex); } /** * Create the index and associate it with all project models in the Application */ private ApplicationReportIndexModel createApplicationReportIndex(GraphContext context, ProjectModel applicationProjectModel) { ApplicationReportIndexService applicationReportIndexService = new ApplicationReportIndexService(context); ApplicationReportIndexModel index = applicationReportIndexService.create(); addAllProjectModels(index, applicationProjectModel); return index; } /** * Attach all project models within the application to the index. This will make it easy to navigate from the * projectModel to the application index. */ private void addAllProjectModels(ApplicationReportIndexModel navIdx, ProjectModel projectModel) { navIdx.addProjectModel(projectModel); for (ProjectModel childProject : projectModel.getChildProjects()) { if (!Iterators.asSet(navIdx.getProjectModels()).contains(childProject)) addAllProjectModels(navIdx, childProject); } } }
CreateApplicationReportIndexRuleProvider javadoc
reporting/api/src/main/java/org/jboss/windup/reporting/rules/CreateApplicationReportIndexRuleProvider.java
CreateApplicationReportIndexRuleProvider javadoc
Java
lgpl-2.1
a2c986508d49ee8ef7dd5285c7aa5179460ba6b7
0
xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.mail; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.UUID; import javax.inject.Provider; import javax.mail.Session; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.xwiki.component.manager.ComponentManager; import org.xwiki.component.util.DefaultParameterizedType; import org.xwiki.context.Execution; import org.xwiki.context.ExecutionContext; import org.xwiki.mail.internal.DefaultMailResult; import org.xwiki.mail.internal.MemoryMailListener; import org.xwiki.mail.internal.UpdateableMailStatusResult; import org.xwiki.mail.script.MailStorageScriptService; import org.xwiki.mail.script.ScriptMailResult; import org.xwiki.security.authorization.ContextualAuthorizationManager; import org.xwiki.security.authorization.Right; import org.xwiki.test.annotation.ComponentList; import org.xwiki.test.mockito.MockitoComponentMockingRule; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.when; /** * Unit tests for {@link MailStorageScriptService}. 
* * @version $Id$ * @since 6.4 */ @ComponentList({ MemoryMailListener.class }) public class MailStorageScriptServiceTest { @Rule public MockitoComponentMockingRule<MailStorageScriptService> mocker = new MockitoComponentMockingRule<>(MailStorageScriptService.class); @Before public void setUp() throws Exception { Provider<ComponentManager> componentManagerProvider = this.mocker.registerMockComponent( new DefaultParameterizedType(null, Provider.class, ComponentManager.class), "context"); when(componentManagerProvider.get()).thenReturn(this.mocker); Execution execution = this.mocker.getInstance(Execution.class); ExecutionContext executionContext = new ExecutionContext(); when(execution.getContext()).thenReturn(executionContext); } @Test public void resendWhenDatabaseListenerNotFound() throws Exception { ScriptMailResult result = this.mocker.getComponentUnderTest().resend("batchId", "messageId"); assertNull(result); assertEquals("Can't find descriptor for the component [role = [interface org.xwiki.mail.MailListener] " + "hint = [database]]", this.mocker.getComponentUnderTest().getLastError().getMessage()); } @Test public void resendWhenMailContentStoreLoadingFails() throws Exception { this.mocker.registerComponent(MailListener.class, "database", this.mocker.getInstance(MailListener.class, "memory")); MailContentStore contentStore = this.mocker.getInstance(MailContentStore.class, "filesystem"); when(contentStore.load(any(), eq("batchId"), eq("messageId"))).thenThrow( new MailStoreException("error")); ScriptMailResult result = this.mocker.getComponentUnderTest().resend("batchId", "messageId"); assertNull(result); assertEquals("error", this.mocker.getComponentUnderTest().getLastError().getMessage()); } @Test public void resend() throws Exception { MemoryMailListener memoryMailListener = this.mocker.getInstance(MailListener.class, "memory"); this.mocker.registerComponent(MailListener.class, "database", memoryMailListener); Session session = Session.getInstance(new Properties()); ExtendedMimeMessage message = new ExtendedMimeMessage(); String batchId = UUID.randomUUID().toString(); MailContentStore contentStore = this.mocker.getInstance(MailContentStore.class, "filesystem"); when(contentStore.load(any(), eq(batchId), eq("messageId"))).thenReturn(message); MailSender sender = this.mocker.getInstance(MailSender.class); when(sender.sendAsynchronously(eq(Arrays.asList(message)), any(), same(memoryMailListener))) .thenReturn(new DefaultMailResult(batchId)); // Since resend() will wait indefinitely for the message count to be correct, we need to configure it here // as we're mocking the MailSender. ((UpdateableMailStatusResult) memoryMailListener.getMailStatusResult()).setTotalSize(1); ((UpdateableMailStatusResult) memoryMailListener.getMailStatusResult()).incrementCurrentSize(); ScriptMailResult result = this.mocker.getComponentUnderTest().resend(batchId, "messageId"); assertNotNull(result); assertEquals(batchId, result.getBatchId()); } @Test public void loadWhenNotAuthorized() throws Exception { ContextualAuthorizationManager authorizationManager = this.mocker.getInstance(ContextualAuthorizationManager.class); when(authorizationManager.hasAccess(Right.ADMIN)).thenReturn(false); List<MailStatus> result = this.mocker.getComponentUnderTest().load( Collections.<String, Object>emptyMap(), 0, 0, null, false); assertNull(result); assertEquals("You need Admin rights to load mail statuses", this.mocker.getComponentUnderTest().getLastError().getMessage()); } }
xwiki-platform-core/xwiki-platform-mail/xwiki-platform-mail-send/xwiki-platform-mail-send-storage/src/test/java/org/xwiki/mail/MailStorageScriptServiceTest.java
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.mail; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Properties; import java.util.UUID; import javax.inject.Provider; import javax.mail.Session; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.xwiki.component.manager.ComponentManager; import org.xwiki.component.util.DefaultParameterizedType; import org.xwiki.context.Execution; import org.xwiki.context.ExecutionContext; import org.xwiki.mail.internal.DefaultMailResult; import org.xwiki.mail.internal.MemoryMailListener; import org.xwiki.mail.internal.UpdateableMailStatusResult; import org.xwiki.mail.script.MailStorageScriptService; import org.xwiki.mail.script.ScriptMailResult; import org.xwiki.security.authorization.ContextualAuthorizationManager; import org.xwiki.security.authorization.Right; import org.xwiki.test.annotation.ComponentList; import org.xwiki.test.mockito.MockitoComponentMockingRule; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.when; /** * Unit tests for {@link MailStorageScriptService}. 
* * @version $Id$ * @since 6.4 */ @ComponentList({ MemoryMailListener.class }) public class MailStorageScriptServiceTest { @Rule public MockitoComponentMockingRule<MailStorageScriptService> mocker = new MockitoComponentMockingRule<>(MailStorageScriptService.class); @Before public void setUp() throws Exception { Provider<ComponentManager> componentManagerProvider = this.mocker.registerMockComponent( new DefaultParameterizedType(null, Provider.class, ComponentManager.class), "context"); when(componentManagerProvider.get()).thenReturn(this.mocker); Execution execution = this.mocker.getInstance(Execution.class); ExecutionContext executionContext = new ExecutionContext(); when(execution.getContext()).thenReturn(executionContext); } @Test public void resendWhenDatabaseListenerNotFound() throws Exception { ScriptMailResult result = this.mocker.getComponentUnderTest().resend("batchId", "messageId"); assertNull(result); assertEquals("Can't find descriptor for the component [role = [interface org.xwiki.mail.MailListener] " + "hint = [database]]", this.mocker.getComponentUnderTest().getLastError().getMessage()); } @Test public void resendWhenMailContentStoreLoadingFails() throws Exception { this.mocker.registerComponent(MailListener.class, "database", this.mocker.getInstance(MailListener.class, "memory")); MailContentStore contentStore = this.mocker.getInstance(MailContentStore.class, "filesystem"); when(contentStore.load(any(Session.class), eq("batchId"), eq("messageId"))).thenThrow( new MailStoreException("error")); ScriptMailResult result = this.mocker.getComponentUnderTest().resend("batchId", "messageId"); assertNull(result); assertEquals("error", this.mocker.getComponentUnderTest().getLastError().getMessage()); } @Test public void resend() throws Exception { MemoryMailListener memoryMailListener = this.mocker.getInstance(MailListener.class, "memory"); this.mocker.registerComponent(MailListener.class, "database", memoryMailListener); Session session = Session.getInstance(new Properties()); ExtendedMimeMessage message = new ExtendedMimeMessage(); String batchId = UUID.randomUUID().toString(); MailContentStore contentStore = this.mocker.getInstance(MailContentStore.class, "filesystem"); when(contentStore.load(any(Session.class), eq(batchId), eq("messageId"))).thenReturn(message); MailSender sender = this.mocker.getInstance(MailSender.class); when(sender.sendAsynchronously(eq(Arrays.asList(message)), any(Session.class), same(memoryMailListener))).thenReturn(new DefaultMailResult(batchId)); // Since resend() will wait indefinitely for the message count to be correct, we need to configure it here // as we're mocking the MailSender. ((UpdateableMailStatusResult) memoryMailListener.getMailStatusResult()).setTotalSize(1); ((UpdateableMailStatusResult) memoryMailListener.getMailStatusResult()).incrementCurrentSize(); ScriptMailResult result = this.mocker.getComponentUnderTest().resend(batchId, "messageId"); assertNotNull(result); assertEquals(batchId, result.getBatchId()); } @Test public void loadWhenNotAuthorized() throws Exception { ContextualAuthorizationManager authorizationManager = this.mocker.getInstance(ContextualAuthorizationManager.class); when(authorizationManager.hasAccess(Right.ADMIN)).thenReturn(false); List<MailStatus> result = this.mocker.getComponentUnderTest().load( Collections.<String, Object>emptyMap(), 0, 0, null, false); assertNull(result); assertEquals("You need Admin rights to load mail statuses", this.mocker.getComponentUnderTest().getLastError().getMessage()); } }
XCOMMONS-1081: Upgrade to Mockito 2.2.15
xwiki-platform-core/xwiki-platform-mail/xwiki-platform-mail-send/xwiki-platform-mail-send-storage/src/test/java/org/xwiki/mail/MailStorageScriptServiceTest.java
XCOMMONS-1081: Upgrade to Mockito 2.2.15
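The upgrade above switches the matcher imports from org.mockito.Matchers to org.mockito.ArgumentMatchers and drops the class argument from any(...). A small self-contained JUnit 4 sketch of that Mockito 2 style follows; MessageStore is a hypothetical interface invented for the sketch, not the project's MailContentStore.

import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;   // Mockito 2: matchers live in ArgumentMatchers
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;
import java.util.List;

import org.junit.Test;

public class MatcherMigrationTest {

    // Hypothetical collaborator with a shape similar to the store being stubbed above.
    interface MessageStore {
        String load(List<String> session, String batchId, String messageId);
    }

    @Test
    public void untypedAnyStubsTheCallLikeTypedAnyDidInMockitoOne() {
        MessageStore store = mock(MessageStore.class);

        // Mockito 1 style was any(List.class); with Mockito 2 a plain any() is enough
        when(store.load(any(), eq("batchId"), eq("messageId"))).thenReturn("hello");

        assertEquals("hello", store.load(Collections.<String>emptyList(), "batchId", "messageId"));
    }
}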
Java
lgpl-2.1
081c1c0cad05785a018b9c76664e30cc467262bf
0
clienthax/ForgeGradle,kenzierocks/ForgeGradle,Barteks2x/ForgeGradle,nallar/ForgeGradle,kashike/ForgeGradle,killjoy1221/ForgeGradle,simon816/ForgeGradle,PaperMC/PaperGradle,RX14/ForgeGradle,mattparizeau/ForgeGradle,matthewprenger/ForgeGradle
package net.minecraftforge.gradle.user.patch; import static net.minecraftforge.gradle.common.Constants.JAR_MERGED; import static net.minecraftforge.gradle.user.UserConstants.CLASSIFIER_DECOMPILED; import static net.minecraftforge.gradle.user.UserConstants.CONFIG_MC; import static net.minecraftforge.gradle.user.patch.UserPatchConstants.*; import groovy.lang.Closure; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import net.minecraftforge.gradle.delayed.DelayedFile; import net.minecraftforge.gradle.tasks.ProcessJarTask; import net.minecraftforge.gradle.tasks.ProcessSrcJarTask; import net.minecraftforge.gradle.tasks.RemapSourcesTask; import net.minecraftforge.gradle.tasks.user.ApplyBinPatchesTask; import net.minecraftforge.gradle.user.UserBasePlugin; import org.gradle.api.Action; import org.gradle.api.Project; import org.gradle.api.execution.TaskExecutionGraph; import org.gradle.api.plugins.JavaPluginConvention; import org.gradle.api.tasks.SourceSet; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public abstract class UserPatchBasePlugin extends UserBasePlugin<UserPackExtension> { @SuppressWarnings({ "serial", "unchecked", "rawtypes" }) @Override public void applyPlugin() { super.applyPlugin(); // add the binPatching task { ApplyBinPatchesTask task = makeTask("applyBinPatches", ApplyBinPatchesTask.class); task.setInJar(delayedFile(JAR_MERGED)); task.setOutJar(delayedFile(JAR_BINPATCHED)); task.setPatches(delayedFile(BINPATCHES)); task.setClassesJar(delayedFile(BINARIES_JAR)); task.setResources(delayedFileTree(RES_DIR)); task.dependsOn("mergeJars"); project.getTasks().getByName("deobfBinJar").dependsOn(task); ProcessJarTask deobf = (ProcessJarTask) project.getTasks().getByName("deobfBinJar").dependsOn(task);; deobf.setInJar(delayedFile(JAR_BINPATCHED)); deobf.dependsOn(task); } // add source patching task { DelayedFile decompOut = delayedDirtyFile(null, CLASSIFIER_DECOMPILED, "jar"); DelayedFile processed = delayedDirtyFile(null, CLASSIFIER_PATCHED, "jar"); ProcessSrcJarTask patch = makeTask("processSources", ProcessSrcJarTask.class); patch.dependsOn("decompile"); patch.setInJar(decompOut); patch.setOutJar(processed); configurePatching(patch); RemapSourcesTask remap = (RemapSourcesTask) project.getTasks().getByName("remapJar"); remap.setInJar(processed); remap.dependsOn(patch); } // add special handling here. // stop people screwing stuff up. project.getGradle().getTaskGraph().whenReady(new Closure<Object>(this, null) { @Override public Object call() { TaskExecutionGraph graph = project.getGradle().getTaskGraph(); String path = project.getPath(); if (graph.hasTask(path + "setupDecompWorkspace")) { getExtension().setDecomp(); setMinecraftDeps(true, false); } return null; } @Override public Object call(Object obj) { return call(); } @Override public Object call(Object... obj) { return call(); } }); // configure eclipse task to do extra stuff. 
project.getTasks().getByName("eclipse").doLast(new Action() { @Override public void execute(Object arg0) { File f = new File(ECLIPSE_LOCATION); if (f.exists())// && f.length() == 0) { String projectDir = "URI//" + project.getProjectDir().toURI().toString(); try { byte[] LOCATION_BEFORE = new byte[] { 0x40, (byte) 0xB1, (byte) 0x8B, (byte) 0x81, 0x23, (byte) 0xBC, 0x00, 0x14, 0x1A, 0x25, (byte) 0x96, (byte) 0xE7, (byte) 0xA3, (byte) 0x93, (byte) 0xBE, 0x1E }; byte[] LOCATION_AFTER = new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, (byte) 0xC0, 0x58, (byte) 0xFB, (byte) 0xF3, 0x23, (byte) 0xBC, 0x00, 0x14, 0x1A, 0x51, (byte) 0xF3, (byte) 0x8C, 0x7B, (byte) 0xBB, 0x77, (byte) 0xC6 }; FileOutputStream fos = new FileOutputStream(f); fos.write(LOCATION_BEFORE); //Unknown but w/e fos.write((byte) ((projectDir.length() & 0xFF) >> 8)); fos.write((byte) ((projectDir.length() & 0xFF) >> 0)); fos.write(projectDir.getBytes()); fos.write(LOCATION_AFTER); //Unknown but w/e fos.close(); } catch (IOException e) { e.printStackTrace(); } } } }); } /** * Allows for the configuration of tasks in AfterEvaluate */ protected void delayedTaskConfig() { // add src ATs ProcessJarTask binDeobf = (ProcessJarTask) project.getTasks().getByName("deobfBinJar"); ProcessJarTask decompDeobf = (ProcessJarTask) project.getTasks().getByName("deobfuscateJar"); // ATs from the ExtensionObject Object[] extAts = getExtension().getAccessTransformers().toArray(); binDeobf.addTransformer(extAts); decompDeobf.addTransformer(extAts); // from the resources dirs { JavaPluginConvention javaConv = (JavaPluginConvention) project.getConvention().getPlugins().get("java"); SourceSet main = javaConv.getSourceSets().getByName("main"); SourceSet api = javaConv.getSourceSets().getByName("api"); for (File at : main.getResources().getFiles()) { if (at.getName().toLowerCase().endsWith("_at.cfg")) { project.getLogger().lifecycle("Found AccessTransformer in main resources: " + at.getName()); binDeobf.addTransformer(at); decompDeobf.addTransformer(at); } } for (File at : api.getResources().getFiles()) { if (at.getName().toLowerCase().endsWith("_at.cfg")) { project.getLogger().lifecycle("Found AccessTransformer in api resources: " + at.getName()); binDeobf.addTransformer(at); decompDeobf.addTransformer(at); } } } super.delayedTaskConfig(); // add MC repo. 
final String repoDir = delayedDirtyFile("this", "doesnt", "matter").call().getParentFile().getAbsolutePath(); project.allprojects(new Action<Project>() { public void execute(Project proj) { addFlatRepo(proj, getApiName()+"FlatRepo", repoDir); proj.getLogger().info("Adding repo to " + proj.getPath() + " >> " + repoDir); } }); } @Override protected void configurePostDecomp(boolean decomp) { super.configurePostDecomp(decomp); // set MC deps setMinecraftDeps(decomp, false); } private final void setMinecraftDeps(boolean decomp, boolean remove) { String version = getApiVersion(getExtension()); if (decomp) { project.getDependencies().add(CONFIG_MC, ImmutableMap.of("name", getSrcDepName(), "version", version)); if (remove) { project.getConfigurations().getByName(CONFIG_MC).exclude(ImmutableMap.of("module", getBinDepName())); } } else { project.getDependencies().add(CONFIG_MC, ImmutableMap.of("name", getBinDepName(), "version", version)); if (remove) { project.getConfigurations().getByName(CONFIG_MC).exclude(ImmutableMap.of("module", getSrcDepName())); } } } @Override protected DelayedFile getDevJson() { return delayedFile(JSON); } @Override protected String getSrcDepName() { return getApiName() + "Src"; } @Override protected String getBinDepName() { return getApiName() + "Bin"; } @Override protected boolean hasApiVersion() { return true; } @Override protected String getApiCacheDir(UserPackExtension exten) { return "{CACHE_DIR}/minecraft/"+getApiGroup().replace('.', '/') + "/{API_NAME}/{API_VERSION}"; } @Override protected String getUserDev() { return getApiGroup() + ":{API_NAME}:{API_VERSION}"; } @Override protected Class<UserPackExtension> getExtensionClass() { return UserPackExtension.class; } @Override protected String getApiVersion(UserPackExtension exten) { return exten.getApiVersion(); } @Override protected String getMcVersion(UserPackExtension exten) { return exten.getVersion(); } @Override protected String getClientRunClass() { return "net.minecraft.launchwrapper.Launch"; } @Override protected Iterable<String> getClientRunArgs() { return ImmutableList.of("--version", "1.7", "--tweakClass", "cpw.mods.fml.common.launcher.FMLTweaker", "--username=ForgeDevName", "--accessToken FML"); } @Override protected String getServerRunClass() { return "cpw.mods.fml.relauncher.ServerLaunchWrapper"; } @Override protected Iterable<String> getServerRunArgs() { return new ArrayList<String>(0); } /** * Add in the desired patching stages. * This happens during normal evaluation, and NOT AfterEvaluate. * @param patch */ protected abstract void configurePatching(ProcessSrcJarTask patch); /** * Should be with seperate with periods. */ protected abstract String getApiGroup(); }
src/main/java/net/minecraftforge/gradle/user/patch/UserPatchBasePlugin.java
package net.minecraftforge.gradle.user.patch; import static net.minecraftforge.gradle.common.Constants.JAR_MERGED; import static net.minecraftforge.gradle.user.UserConstants.CLASSIFIER_DECOMPILED; import static net.minecraftforge.gradle.user.UserConstants.CONFIG_MC; import static net.minecraftforge.gradle.user.patch.UserPatchConstants.*; import groovy.lang.Closure; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import net.minecraftforge.gradle.delayed.DelayedFile; import net.minecraftforge.gradle.tasks.ProcessJarTask; import net.minecraftforge.gradle.tasks.ProcessSrcJarTask; import net.minecraftforge.gradle.tasks.RemapSourcesTask; import net.minecraftforge.gradle.tasks.user.ApplyBinPatchesTask; import net.minecraftforge.gradle.user.UserBasePlugin; import org.gradle.api.Action; import org.gradle.api.Project; import org.gradle.api.execution.TaskExecutionGraph; import org.gradle.api.plugins.JavaPluginConvention; import org.gradle.api.tasks.SourceSet; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public abstract class UserPatchBasePlugin extends UserBasePlugin<UserPackExtension> { @SuppressWarnings({ "serial", "unchecked", "rawtypes" }) @Override public void applyPlugin() { super.applyPlugin(); // add the binPatching task { ApplyBinPatchesTask task = makeTask("applyBinPatches", ApplyBinPatchesTask.class); task.setInJar(delayedFile(JAR_MERGED)); task.setOutJar(delayedFile(JAR_BINPATCHED)); task.setPatches(delayedFile(BINPATCHES)); task.setClassesJar(delayedFile(BINARIES_JAR)); task.setResources(delayedFileTree(RES_DIR)); task.dependsOn("mergeJars"); project.getTasks().getByName("deobfBinJar").dependsOn(task); ProcessJarTask deobf = (ProcessJarTask) project.getTasks().getByName("deobfBinJar").dependsOn(task);; deobf.setInJar(delayedFile(JAR_BINPATCHED)); deobf.dependsOn(task); } // add source patching task { DelayedFile decompOut = delayedDirtyFile(null, CLASSIFIER_DECOMPILED, "jar"); DelayedFile processed = delayedDirtyFile(null, CLASSIFIER_PATCHED, "jar"); ProcessSrcJarTask patch = makeTask("processSources", ProcessSrcJarTask.class); patch.dependsOn("decompile"); patch.setInJar(decompOut); patch.setOutJar(processed); configurePatching(patch); RemapSourcesTask remap = (RemapSourcesTask) project.getTasks().getByName("remapJar"); remap.setInJar(processed); remap.dependsOn(patch); } // add special handling here. // stop people screwing stuff up. project.getGradle().getTaskGraph().whenReady(new Closure<Object>(this, null) { @Override public Object call() { TaskExecutionGraph graph = project.getGradle().getTaskGraph(); String path = project.getPath(); if (graph.hasTask(path + "setupDecompWorkspace")) { getExtension().setDecomp(); setMinecraftDeps(true, false); } return null; } @Override public Object call(Object obj) { return call(); } @Override public Object call(Object... obj) { return call(); } }); // configure eclipse task to do extra stuff. 
project.getTasks().getByName("eclipse").doLast(new Action() { @Override public void execute(Object arg0) { File f = new File(ECLIPSE_LOCATION); if (f.exists())// && f.length() == 0) { String projectDir = "URI//" + project.getProjectDir().toURI().toString(); try { byte[] LOCATION_BEFORE = new byte[] { 0x40, (byte) 0xB1, (byte) 0x8B, (byte) 0x81, 0x23, (byte) 0xBC, 0x00, 0x14, 0x1A, 0x25, (byte) 0x96, (byte) 0xE7, (byte) 0xA3, (byte) 0x93, (byte) 0xBE, 0x1E }; byte[] LOCATION_AFTER = new byte[] { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, (byte) 0xC0, 0x58, (byte) 0xFB, (byte) 0xF3, 0x23, (byte) 0xBC, 0x00, 0x14, 0x1A, 0x51, (byte) 0xF3, (byte) 0x8C, 0x7B, (byte) 0xBB, 0x77, (byte) 0xC6 }; FileOutputStream fos = new FileOutputStream(f); fos.write(LOCATION_BEFORE); //Unknown but w/e fos.write((byte) ((projectDir.length() & 0xFF) >> 8)); fos.write((byte) ((projectDir.length() & 0xFF) >> 0)); fos.write(projectDir.getBytes()); fos.write(LOCATION_AFTER); //Unknown but w/e fos.close(); } catch (IOException e) { e.printStackTrace(); } } } }); } /** * Allows for the configuration of tasks in AfterEvaluate */ protected void delayedTaskConfig() { // add src ATs ProcessJarTask binDeobf = (ProcessJarTask) project.getTasks().getByName("deobfBinJar"); ProcessJarTask decompDeobf = (ProcessJarTask) project.getTasks().getByName("deobfuscateJar"); // ATs from the ExtensionObject Object[] extAts = getExtension().getAccessTransformers().toArray(); binDeobf.addTransformer(extAts); decompDeobf.addTransformer(extAts); // from the resources dirs { JavaPluginConvention javaConv = (JavaPluginConvention) project.getConvention().getPlugins().get("java"); SourceSet main = javaConv.getSourceSets().getByName("main"); SourceSet api = javaConv.getSourceSets().getByName("api"); for (File at : main.getResources().getFiles()) { if (at.getName().toLowerCase().endsWith("_at.cfg")) { project.getLogger().lifecycle("Found AccessTransformer in main resources: " + at.getName()); binDeobf.addTransformer(at); decompDeobf.addTransformer(at); } } for (File at : api.getResources().getFiles()) { if (at.getName().toLowerCase().endsWith("_at.cfg")) { project.getLogger().lifecycle("Found AccessTransformer in api resources: " + at.getName()); binDeobf.addTransformer(at); decompDeobf.addTransformer(at); } } } super.delayedTaskConfig(); // add MC repo. 
final String repoDir = delayedDirtyFile("this", "doesnt", "matter").call().getParentFile().getAbsolutePath(); project.allprojects(new Action<Project>() { public void execute(Project proj) { addFlatRepo(proj, getApiName()+"FlatRepo", repoDir); proj.getLogger().info("Adding repo to " + proj.getPath() + " >> " + repoDir); } }); } @Override protected void configurePostDecomp(boolean decomp) { super.configurePostDecomp(decomp); // set MC deps setMinecraftDeps(decomp, false); } private final void setMinecraftDeps(boolean decomp, boolean remove) { String version = getApiVersion(getExtension()); if (decomp) { project.getDependencies().add(CONFIG_MC, ImmutableMap.of("name", getSrcDepName(), "version", version)); if (remove) { project.getConfigurations().getByName(CONFIG_MC).exclude(ImmutableMap.of("module", getBinDepName())); } } else { project.getDependencies().add(CONFIG_MC, ImmutableMap.of("name", getBinDepName(), "version", version)); if (remove) { project.getConfigurations().getByName(CONFIG_MC).exclude(ImmutableMap.of("module", getSrcDepName())); } } } @Override protected DelayedFile getDevJson() { return delayedFile(JSON); } @Override protected String getSrcDepName() { return getApiName() + "Src"; } @Override protected String getBinDepName() { return getApiName() + "Bin"; } @Override protected boolean hasApiVersion() { return true; } @Override protected String getApiCacheDir(UserPackExtension exten) { return "{CACHE_DIR}/minecraft/{API_NAME}/"+getApiGroup().replace('.', '/') + "/{API_NAME}/{API_VERSION}"; } @Override protected String getUserDev() { return getApiGroup() + ":{API_NAME}:{API_VERSION}"; } @Override protected Class<UserPackExtension> getExtensionClass() { return UserPackExtension.class; } @Override protected String getApiVersion(UserPackExtension exten) { return exten.getApiVersion(); } @Override protected String getMcVersion(UserPackExtension exten) { return exten.getVersion(); } @Override protected String getClientRunClass() { return "net.minecraft.launchwrapper.Launch"; } @Override protected Iterable<String> getClientRunArgs() { return ImmutableList.of("--version", "1.7", "--tweakClass", "cpw.mods.fml.common.launcher.FMLTweaker", "--username=ForgeDevName", "--accessToken FML"); } @Override protected String getServerRunClass() { return "cpw.mods.fml.relauncher.ServerLaunchWrapper"; } @Override protected Iterable<String> getServerRunArgs() { return new ArrayList<String>(0); } /** * Add in the desired patching stages. * This happens during normal evaluation, and NOT AfterEvaluate. * @param patch */ protected abstract void configurePatching(ProcessSrcJarTask patch); /** * Should be with seperate with periods. */ protected abstract String getApiGroup(); }
fixed a pretty serious cache typo
src/main/java/net/minecraftforge/gradle/user/patch/UserPatchBasePlugin.java
fixed a pretty serious cache typo
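The "cache typo" fixed above is a duplicated {API_NAME} segment in the cache directory template. A tiny sketch that expands both templates with example values shows how the extra segment changed the cache location; the group, name and version used here are assumptions for illustration, and {CACHE_DIR} is left unexpanded because the plugin substitutes it elsewhere.

public class CachePathSketch {
    public static void main(String[] args) {
        String apiGroup = "net.minecraftforge";   // example values; the real ones come from the plugin config
        String apiName = "forge";
        String apiVersion = "1.7.10-10.13.4.1558";

        String buggy = "{CACHE_DIR}/minecraft/{API_NAME}/" + apiGroup.replace('.', '/') + "/{API_NAME}/{API_VERSION}";
        String fixed = "{CACHE_DIR}/minecraft/" + apiGroup.replace('.', '/') + "/{API_NAME}/{API_VERSION}";

        System.out.println(expand(buggy, apiName, apiVersion));
        // {CACHE_DIR}/minecraft/forge/net/minecraftforge/forge/1.7.10-10.13.4.1558  (extra "forge" segment)
        System.out.println(expand(fixed, apiName, apiVersion));
        // {CACHE_DIR}/minecraft/net/minecraftforge/forge/1.7.10-10.13.4.1558
    }

    private static String expand(String template, String apiName, String apiVersion) {
        return template.replace("{API_NAME}", apiName).replace("{API_VERSION}", apiVersion);
    }
}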
Java
lgpl-2.1
0a1a9c8cbec3899a9e06cc666d11517e47a24d3d
0
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
package org.intermine.common.swing; /* * Copyright (C) 2002-2010 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.awt.Container; import java.awt.Dialog; import java.awt.Dimension; import java.awt.Frame; import java.awt.Font; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Window; import java.awt.event.ActionEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.MissingResourceException; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.Box; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JScrollPane; import javax.swing.JTextArea; /** * Progress dialog displaying the output from the system process of a * <code>SystemProcessSwingWorker</code>. This dialog presents one or two * read-only text areas that display the standard output and where relevant * error output from the child process. It also provides an "Ok" button, available * once the process has finished, and a "cancel" button that will attempt to * cancel the process. */ public class SystemProcessProgressDialog extends StandardJDialog { private static final long serialVersionUID = -367426748732877748L; /** * Label displaying information text. * @serial */ protected JLabel informationLabel = new JLabel(); /** * The text area for standard output. * @serial */ protected JTextArea outputArea = new JTextArea(); /** * The scroll pane around <code>outputArea</code>. * @serial */ protected JScrollPane outputAreaScrollPane = new JScrollPane(outputArea); /** * The text area for error output. * @serial */ protected JTextArea errorArea = new JTextArea(); /** * The scroll pane around <code>errorArea</code>. * @serial */ protected JScrollPane errorAreaScrollPane = new JScrollPane(errorArea); /** * The "ok" action. * @serial */ protected Action okAction = new OkAction(); /** * The "cancel" action. * @serial */ protected Action cancelAction = new CancelProcessAction(); /** * The SystemProcessSwingWorker currently being watched. */ protected transient SystemProcessSwingWorker worker; /** * The listener for events from <code>worker</code>. * @serial */ protected PropertyChangeListener listener = new WorkerListener(); /** * Initialise with the given parent Dialog. * * @param owner The parent Dialog. * * @see javax.swing.JDialog#JDialog(Dialog) */ public SystemProcessProgressDialog(Dialog owner) { super(owner); init(); } /** * Initialise with the given parent Frame. * * @param owner The parent Frame. * * @see javax.swing.JDialog#JDialog(Frame) */ public SystemProcessProgressDialog(Frame owner) { super(owner); init(); } /** * Initialise with the given parent Window. * * @param owner The parent Window. * * @see javax.swing.JDialog#JDialog(Window) */ public SystemProcessProgressDialog(Window owner) { super(owner); init(); } /** * Internal initialisation method. Lays out the child components. 
*/ private void init() { Dimension preferredSize = new Dimension(600, 320); outputArea.setEditable(false); outputAreaScrollPane.setPreferredSize(preferredSize); errorArea.setEditable(false); errorAreaScrollPane.setPreferredSize(preferredSize); JComponent buttonBox = initButtons(); Container cp = getContentPane(); cp.setLayout(new GridBagLayout()); GridBagConstraints cons = new GridBagConstraints(); cons.gridx = 0; cons.gridy = 1; cons.gridheight = 1; cons.gridwidth = 1; cons.fill = GridBagConstraints.HORIZONTAL; cons.weightx = 1; cons.weighty = 0; cons.insets = new Insets(4, 4, 4, 4); cp.add(informationLabel, cons); cons.gridy++; cons.fill = GridBagConstraints.BOTH; cons.weighty = 0.5; cp.add(outputAreaScrollPane, cons); outputArea.setFont(new Font("monospaced", Font.PLAIN, 10)); cons.gridy++; cp.add(errorAreaScrollPane, cons); errorArea.setFont(new Font("monospaced", Font.PLAIN, 10)); cons.gridy++; cons.fill = GridBagConstraints.HORIZONTAL; cons.weighty = 0; cp.add(buttonBox, cons); pack(); } /** * Initialise a component that contains the buttons relevant to this * dialog. This implementation adds "ok" and "cancel" buttons, but this * can be overridden to add any buttons required. * * @return The JComponent containing the action buttons. */ protected JComponent initButtons() { Box buttonBox = Box.createHorizontalBox(); buttonBox.add(Box.createHorizontalGlue()); buttonBox.add(new JButton(okAction)); buttonBox.add(Box.createHorizontalStrut(8)); buttonBox.add(new JButton(cancelAction)); buttonBox.add(Box.createHorizontalGlue()); return buttonBox; } /** * Set the text on the information label. * * @param message The text to display. */ public void setInformationLabel(String message) { informationLabel.setText(message); } /** * Set the SystemProcessSwingWorker being watched. * * @param worker The SystemProcessSwingWorker. */ public void setWorker(SystemProcessSwingWorker worker) { if (this.worker != null) { this.worker.removePropertyChangeListener(listener); } this.worker = worker; resetOutput(); okAction.setEnabled(false); cancelAction.setEnabled(false); if (worker != null) { boolean changed = errorAreaScrollPane.isVisible() == worker.areStreamsLinked(); if (changed) { errorAreaScrollPane.setVisible(!worker.areStreamsLinked()); pack(); } worker.addPropertyChangeListener(listener); } } /** * Clear the output text areas. */ public void resetOutput() { outputArea.setText(""); errorArea.setText(""); } /** * Write the given text to the standard output text area. This is additive: * the given text is appended to the text already displayed. * * @param text The text to append. */ public void writeOutput(String text) { outputArea.append(text); } /** * Write the given text to the error output text area. This is additive: * the given text is appended to the text already displayed. * * @param text The text to append. */ public void writeError(String text) { errorArea.append(text); } /** * Default "ok" action that simply hides this dialog when the action * is invoked. */ protected class OkAction extends AbstractAction { private static final long serialVersionUID = -8362658808102333841L; /** * Initialise this dialog with using the "ok" message key via * {@link Messages}. */ public OkAction() { String name; try { name = Messages.getMessage("ok"); } catch (MissingResourceException e) { name = "Ok"; } putValue(NAME, name); setEnabled(false); } /** * When the action is performed, hide the dialog. * * @param event The ActionEvent. 
*/ @Override public void actionPerformed(ActionEvent event) { //setVisible(false); dispose(); } } /** * Default "cancel" action that calls <code>cancel</code> on the watched * SystemProcessSwingWorker. The cancel call is made allowing the worker * thread to be interrupted. * * @see java.util.concurrent.Future#cancel(boolean) */ protected class CancelProcessAction extends AbstractAction { private static final long serialVersionUID = 6168285542925861150L; /** * Initialise this dialog with using the "cancel" message key via * {@link Messages}. */ public CancelProcessAction() { String name; try { name = Messages.getMessage("cancel"); } catch (MissingResourceException e) { name = "Cancel"; } putValue(NAME, name); setEnabled(false); } /** * When the action is performed, call <code>cancel</code> on the watched * worker. * * @param event The ActionEvent. */ @Override public void actionPerformed(ActionEvent event) { worker.cancel(true); } } /** * Listener for properties fired from the watched SystemProcessSwingWorker. * Responds to the events thus: * * <ol> * <li><code>STARTED</code>: enables the cancel button and disables the ok button. * <li><code>OUTPUT</code>: appends the given String to the output text area. * <li><code>ERROR</code>: appends the given String to the error output text area. * <li><code>COMPLETE</code>: disables the cancel button and enables the ok button. * </ol> */ private class WorkerListener implements PropertyChangeListener { /** * Listener method. * * @param event The PropertyChangeEvent from the watched SystemProcessSwingWorker. */ @Override public void propertyChange(PropertyChangeEvent event) { if (SystemProcessSwingWorker.OUTPUT.equals(event.getPropertyName())) { String output = (String) event.getNewValue(); outputArea.append(output); outputArea.repaint(); } else if (SystemProcessSwingWorker.ERROR.equals(event.getPropertyName())) { String output = (String) event.getNewValue(); errorArea.append(output); errorArea.repaint(); } else if (SystemProcessSwingWorker.STARTED.equals(event.getPropertyName())) { cancelAction.setEnabled(true); okAction.setEnabled(false); } else if (SystemProcessSwingWorker.COMPLETE.equals(event.getPropertyName())) { cancelAction.setEnabled(false); okAction.setEnabled(true); worker.removePropertyChangeListener(this); worker = null; } } } }
intermine/MineManager/common/src/main/java/org/intermine/common/swing/SystemProcessProgressDialog.java
package org.intermine.common.swing; /* * Copyright (C) 2002-2010 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.awt.Container; import java.awt.Dialog; import java.awt.Dimension; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Window; import java.awt.event.ActionEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.MissingResourceException; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.Box; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JScrollPane; import javax.swing.JTextArea; /** * Progress dialog displaying the output from the system process of a * <code>SystemProcessSwingWorker</code>. This dialog presents one or two * read-only text areas that display the standard output and where relevant * error output from the child process. It also provides an "Ok" button, available * once the process has finished, and a "cancel" button that will attempt to * cancel the process. */ public class SystemProcessProgressDialog extends StandardJDialog { private static final long serialVersionUID = -367426748732877748L; /** * Label displaying information text. * @serial */ protected JLabel informationLabel = new JLabel(); /** * The text area for standard output. * @serial */ protected JTextArea outputArea = new JTextArea(); /** * The scroll pane around <code>outputArea</code>. * @serial */ protected JScrollPane outputAreaScrollPane = new JScrollPane(outputArea); /** * The text area for error output. * @serial */ protected JTextArea errorArea = new JTextArea(); /** * The scroll pane around <code>errorArea</code>. * @serial */ protected JScrollPane errorAreaScrollPane = new JScrollPane(errorArea); /** * The "ok" action. * @serial */ protected Action okAction = new OkAction(); /** * The "cancel" action. * @serial */ protected Action cancelAction = new CancelProcessAction(); /** * The SystemProcessSwingWorker currently being watched. */ protected transient SystemProcessSwingWorker worker; /** * The listener for events from <code>worker</code>. * @serial */ protected PropertyChangeListener listener = new WorkerListener(); /** * Initialise with the given parent Dialog. * * @param owner The parent Dialog. * * @see javax.swing.JDialog#JDialog(Dialog) */ public SystemProcessProgressDialog(Dialog owner) { super(owner); init(); } /** * Initialise with the given parent Frame. * * @param owner The parent Frame. * * @see javax.swing.JDialog#JDialog(Frame) */ public SystemProcessProgressDialog(Frame owner) { super(owner); init(); } /** * Initialise with the given parent Window. * * @param owner The parent Window. * * @see javax.swing.JDialog#JDialog(Window) */ public SystemProcessProgressDialog(Window owner) { super(owner); init(); } /** * Internal initialisation method. Lays out the child components. 
*/ private void init() { Dimension preferredSize = new Dimension(600, 320); outputArea.setEditable(false); outputAreaScrollPane.setPreferredSize(preferredSize); errorArea.setEditable(false); errorAreaScrollPane.setPreferredSize(preferredSize); JComponent buttonBox = initButtons(); Container cp = getContentPane(); cp.setLayout(new GridBagLayout()); GridBagConstraints cons = new GridBagConstraints(); cons.gridx = 0; cons.gridy = 1; cons.gridheight = 1; cons.gridwidth = 1; cons.fill = GridBagConstraints.HORIZONTAL; cons.weightx = 1; cons.weighty = 0; cons.insets = new Insets(4, 4, 4, 4); cp.add(informationLabel, cons); cons.gridy++; cons.fill = GridBagConstraints.BOTH; cons.weighty = 0.5; cp.add(outputAreaScrollPane, cons); cons.gridy++; cp.add(errorAreaScrollPane, cons); cons.gridy++; cons.fill = GridBagConstraints.HORIZONTAL; cons.weighty = 0; cp.add(buttonBox, cons); pack(); } /** * Initialise a component that contains the buttons relevant to this * dialog. This implementation adds "ok" and "cancel" buttons, but this * can be overridden to add any buttons required. * * @return The JComponent containing the action buttons. */ protected JComponent initButtons() { Box buttonBox = Box.createHorizontalBox(); buttonBox.add(Box.createHorizontalGlue()); buttonBox.add(new JButton(okAction)); buttonBox.add(Box.createHorizontalStrut(8)); buttonBox.add(new JButton(cancelAction)); buttonBox.add(Box.createHorizontalGlue()); return buttonBox; } /** * Set the text on the information label. * * @param message The text to display. */ public void setInformationLabel(String message) { informationLabel.setText(message); } /** * Set the SystemProcessSwingWorker being watched. * * @param worker The SystemProcessSwingWorker. */ public void setWorker(SystemProcessSwingWorker worker) { if (this.worker != null) { this.worker.removePropertyChangeListener(listener); } this.worker = worker; resetOutput(); okAction.setEnabled(false); cancelAction.setEnabled(false); if (worker != null) { boolean changed = errorAreaScrollPane.isVisible() == worker.areStreamsLinked(); if (changed) { errorAreaScrollPane.setVisible(!worker.areStreamsLinked()); pack(); } worker.addPropertyChangeListener(listener); } } /** * Clear the output text areas. */ public void resetOutput() { outputArea.setText(""); errorArea.setText(""); } /** * Write the given text to the standard output text area. This is additive: * the given text is appended to the text already displayed. * * @param text The text to append. */ public void writeOutput(String text) { outputArea.append(text); } /** * Write the given text to the error output text area. This is additive: * the given text is appended to the text already displayed. * * @param text The text to append. */ public void writeError(String text) { errorArea.append(text); } /** * Default "ok" action that simply hides this dialog when the action * is invoked. */ protected class OkAction extends AbstractAction { private static final long serialVersionUID = -8362658808102333841L; /** * Initialise this dialog with using the "ok" message key via * {@link Messages}. */ public OkAction() { String name; try { name = Messages.getMessage("ok"); } catch (MissingResourceException e) { name = "Ok"; } putValue(NAME, name); setEnabled(false); } /** * When the action is performed, hide the dialog. * * @param event The ActionEvent. */ @Override public void actionPerformed(ActionEvent event) { setVisible(false); } } /** * Default "cancel" action that calls <code>cancel</code> on the watched * SystemProcessSwingWorker. 
The cancel call is made allowing the worker * thread to be interrupted. * * @see java.util.concurrent.Future#cancel(boolean) */ protected class CancelProcessAction extends AbstractAction { private static final long serialVersionUID = 6168285542925861150L; /** * Initialise this dialog with using the "cancel" message key via * {@link Messages}. */ public CancelProcessAction() { String name; try { name = Messages.getMessage("cancel"); } catch (MissingResourceException e) { name = "Cancel"; } putValue(NAME, name); setEnabled(false); } /** * When the action is performed, call <code>cancel</code> on the watched * worker. * * @param event The ActionEvent. */ @Override public void actionPerformed(ActionEvent event) { worker.cancel(true); } } /** * Listener for properties fired from the watched SystemProcessSwingWorker. * Responds to the events thus: * * <ol> * <li><code>STARTED</code>: enables the cancel button and disables the ok button. * <li><code>OUTPUT</code>: appends the given String to the output text area. * <li><code>ERROR</code>: appends the given String to the error output text area. * <li><code>COMPLETE</code>: disables the cancel button and enables the ok button. * </ol> */ private class WorkerListener implements PropertyChangeListener { /** * Listener method. * * @param event The PropertyChangeEvent from the watched SystemProcessSwingWorker. */ @Override public void propertyChange(PropertyChangeEvent event) { if (SystemProcessSwingWorker.OUTPUT.equals(event.getPropertyName())) { String output = (String) event.getNewValue(); outputArea.append(output); outputArea.repaint(); } else if (SystemProcessSwingWorker.ERROR.equals(event.getPropertyName())) { String output = (String) event.getNewValue(); errorArea.append(output); errorArea.repaint(); } else if (SystemProcessSwingWorker.STARTED.equals(event.getPropertyName())) { cancelAction.setEnabled(true); okAction.setEnabled(false); } else if (SystemProcessSwingWorker.COMPLETE.equals(event.getPropertyName())) { cancelAction.setEnabled(false); okAction.setEnabled(true); worker.removePropertyChangeListener(this); worker = null; } } } }
Changed font of text box to monospaced, and dispose window on close
intermine/MineManager/common/src/main/java/org/intermine/common/swing/SystemProcessProgressDialog.java
Changed font of text box to monospaced, and dispose window on close
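The record above amounts to two small Swing changes: the dialog's output areas switch to a monospaced font, and the Ok action disposes the window instead of merely hiding it. As an illustration only (the class name and layout below are invented for this sketch and are not part of the dataset), a minimal standalone dialog applying the same two ideas could look like this:

import java.awt.BorderLayout;
import java.awt.Font;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.SwingUtilities;

/** Minimal sketch: monospaced process-output area plus an Ok button that disposes the dialog. */
public class MonospacedOutputDialogSketch {

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JDialog dialog = new JDialog((java.awt.Frame) null, "Process output", true);

            JTextArea outputArea = new JTextArea(20, 80);
            outputArea.setEditable(false);
            // First half of the change: render process output in a fixed-width font.
            outputArea.setFont(new Font("monospaced", Font.PLAIN, 10));
            outputArea.append("build started...\n");

            JButton okButton = new JButton("Ok");
            // Second half: dispose the window instead of only calling setVisible(false).
            okButton.addActionListener(e -> dialog.dispose());

            dialog.add(new JScrollPane(outputArea), BorderLayout.CENTER);
            dialog.add(okButton, BorderLayout.SOUTH);
            dialog.pack();
            dialog.setVisible(true);
        });
    }
}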
Java
apache-2.0
2279963187e83914fc9b21d9299d4b6f291301eb
0
openbaoz/titanium_mobile,peymanmortazavi/titanium_mobile,pinnamur/titanium_mobile,taoger/titanium_mobile,collinprice/titanium_mobile,cheekiatng/titanium_mobile,jhaynie/titanium_mobile,falkolab/titanium_mobile,FokkeZB/titanium_mobile,hieupham007/Titanium_Mobile,mano-mykingdom/titanium_mobile,collinprice/titanium_mobile,FokkeZB/titanium_mobile,collinprice/titanium_mobile,FokkeZB/titanium_mobile,AngelkPetkov/titanium_mobile,csg-coder/titanium_mobile,smit1625/titanium_mobile,falkolab/titanium_mobile,prop/titanium_mobile,indera/titanium_mobile,smit1625/titanium_mobile,pec1985/titanium_mobile,openbaoz/titanium_mobile,emilyvon/titanium_mobile,cheekiatng/titanium_mobile,jvkops/titanium_mobile,pinnamur/titanium_mobile,bhatfield/titanium_mobile,jvkops/titanium_mobile,openbaoz/titanium_mobile,prop/titanium_mobile,mano-mykingdom/titanium_mobile,bhatfield/titanium_mobile,sriks/titanium_mobile,openbaoz/titanium_mobile,prop/titanium_mobile,hieupham007/Titanium_Mobile,rblalock/titanium_mobile,formalin14/titanium_mobile,csg-coder/titanium_mobile,rblalock/titanium_mobile,perdona/titanium_mobile,pec1985/titanium_mobile,jvkops/titanium_mobile,KangaCoders/titanium_mobile,linearhub/titanium_mobile,collinprice/titanium_mobile,pec1985/titanium_mobile,kopiro/titanium_mobile,taoger/titanium_mobile,smit1625/titanium_mobile,pec1985/titanium_mobile,ashcoding/titanium_mobile,AngelkPetkov/titanium_mobile,pinnamur/titanium_mobile,emilyvon/titanium_mobile,hieupham007/Titanium_Mobile,prop/titanium_mobile,KangaCoders/titanium_mobile,jhaynie/titanium_mobile,falkolab/titanium_mobile,taoger/titanium_mobile,formalin14/titanium_mobile,openbaoz/titanium_mobile,pec1985/titanium_mobile,csg-coder/titanium_mobile,KoketsoMabuela92/titanium_mobile,jvkops/titanium_mobile,perdona/titanium_mobile,prop/titanium_mobile,collinprice/titanium_mobile,benbahrenburg/titanium_mobile,mvitr/titanium_mobile,formalin14/titanium_mobile,collinprice/titanium_mobile,mano-mykingdom/titanium_mobile,shopmium/titanium_mobile,bhatfield/titanium_mobile,pec1985/titanium_mobile,KangaCoders/titanium_mobile,jhaynie/titanium_mobile,AngelkPetkov/titanium_mobile,peymanmortazavi/titanium_mobile,pec1985/titanium_mobile,indera/titanium_mobile,cheekiatng/titanium_mobile,shopmium/titanium_mobile,peymanmortazavi/titanium_mobile,formalin14/titanium_mobile,bhatfield/titanium_mobile,pinnamur/titanium_mobile,jhaynie/titanium_mobile,shopmium/titanium_mobile,cheekiatng/titanium_mobile,mano-mykingdom/titanium_mobile,rblalock/titanium_mobile,shopmium/titanium_mobile,KoketsoMabuela92/titanium_mobile,openbaoz/titanium_mobile,shopmium/titanium_mobile,perdona/titanium_mobile,bhatfield/titanium_mobile,sriks/titanium_mobile,emilyvon/titanium_mobile,ashcoding/titanium_mobile,mano-mykingdom/titanium_mobile,ashcoding/titanium_mobile,benbahrenburg/titanium_mobile,KangaCoders/titanium_mobile,jhaynie/titanium_mobile,ashcoding/titanium_mobile,bhatfield/titanium_mobile,linearhub/titanium_mobile,KangaCoders/titanium_mobile,csg-coder/titanium_mobile,smit1625/titanium_mobile,pinnamur/titanium_mobile,AngelkPetkov/titanium_mobile,jvkops/titanium_mobile,benbahrenburg/titanium_mobile,mvitr/titanium_mobile,indera/titanium_mobile,openbaoz/titanium_mobile,falkolab/titanium_mobile,sriks/titanium_mobile,peymanmortazavi/titanium_mobile,sriks/titanium_mobile,benbahrenburg/titanium_mobile,taoger/titanium_mobile,kopiro/titanium_mobile,formalin14/titanium_mobile,bright-sparks/titanium_mobile,perdona/titanium_mobile,FokkeZB/titanium_mobile,KoketsoMabuela92/titanium_mobile,sriks/titanium_mobile,peymanmortazavi/tita
nium_mobile,KangaCoders/titanium_mobile,linearhub/titanium_mobile,AngelkPetkov/titanium_mobile,benbahrenburg/titanium_mobile,smit1625/titanium_mobile,jvkops/titanium_mobile,jhaynie/titanium_mobile,pec1985/titanium_mobile,ashcoding/titanium_mobile,ashcoding/titanium_mobile,jvkops/titanium_mobile,peymanmortazavi/titanium_mobile,AngelkPetkov/titanium_mobile,rblalock/titanium_mobile,falkolab/titanium_mobile,emilyvon/titanium_mobile,KoketsoMabuela92/titanium_mobile,perdona/titanium_mobile,prop/titanium_mobile,rblalock/titanium_mobile,bright-sparks/titanium_mobile,bright-sparks/titanium_mobile,peymanmortazavi/titanium_mobile,pec1985/titanium_mobile,linearhub/titanium_mobile,csg-coder/titanium_mobile,taoger/titanium_mobile,jhaynie/titanium_mobile,rblalock/titanium_mobile,FokkeZB/titanium_mobile,kopiro/titanium_mobile,KangaCoders/titanium_mobile,indera/titanium_mobile,benbahrenburg/titanium_mobile,shopmium/titanium_mobile,indera/titanium_mobile,peymanmortazavi/titanium_mobile,hieupham007/Titanium_Mobile,shopmium/titanium_mobile,kopiro/titanium_mobile,KoketsoMabuela92/titanium_mobile,bright-sparks/titanium_mobile,rblalock/titanium_mobile,pinnamur/titanium_mobile,shopmium/titanium_mobile,hieupham007/Titanium_Mobile,bhatfield/titanium_mobile,formalin14/titanium_mobile,AngelkPetkov/titanium_mobile,csg-coder/titanium_mobile,AngelkPetkov/titanium_mobile,emilyvon/titanium_mobile,indera/titanium_mobile,ashcoding/titanium_mobile,cheekiatng/titanium_mobile,KoketsoMabuela92/titanium_mobile,pinnamur/titanium_mobile,KoketsoMabuela92/titanium_mobile,smit1625/titanium_mobile,cheekiatng/titanium_mobile,linearhub/titanium_mobile,bright-sparks/titanium_mobile,openbaoz/titanium_mobile,bright-sparks/titanium_mobile,FokkeZB/titanium_mobile,FokkeZB/titanium_mobile,linearhub/titanium_mobile,cheekiatng/titanium_mobile,sriks/titanium_mobile,hieupham007/Titanium_Mobile,kopiro/titanium_mobile,indera/titanium_mobile,mvitr/titanium_mobile,emilyvon/titanium_mobile,bright-sparks/titanium_mobile,mano-mykingdom/titanium_mobile,benbahrenburg/titanium_mobile,benbahrenburg/titanium_mobile,FokkeZB/titanium_mobile,mano-mykingdom/titanium_mobile,collinprice/titanium_mobile,csg-coder/titanium_mobile,collinprice/titanium_mobile,mvitr/titanium_mobile,sriks/titanium_mobile,falkolab/titanium_mobile,perdona/titanium_mobile,perdona/titanium_mobile,sriks/titanium_mobile,jhaynie/titanium_mobile,kopiro/titanium_mobile,falkolab/titanium_mobile,hieupham007/Titanium_Mobile,bright-sparks/titanium_mobile,rblalock/titanium_mobile,cheekiatng/titanium_mobile,taoger/titanium_mobile,KoketsoMabuela92/titanium_mobile,falkolab/titanium_mobile,mvitr/titanium_mobile,taoger/titanium_mobile,emilyvon/titanium_mobile,formalin14/titanium_mobile,mvitr/titanium_mobile,prop/titanium_mobile,jvkops/titanium_mobile,perdona/titanium_mobile,pinnamur/titanium_mobile,indera/titanium_mobile,kopiro/titanium_mobile,hieupham007/Titanium_Mobile,smit1625/titanium_mobile,mvitr/titanium_mobile,mano-mykingdom/titanium_mobile,KangaCoders/titanium_mobile,linearhub/titanium_mobile,csg-coder/titanium_mobile,prop/titanium_mobile,taoger/titanium_mobile,smit1625/titanium_mobile,ashcoding/titanium_mobile,linearhub/titanium_mobile,mvitr/titanium_mobile,emilyvon/titanium_mobile,pinnamur/titanium_mobile,formalin14/titanium_mobile,kopiro/titanium_mobile,bhatfield/titanium_mobile
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2011 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package ti.modules.titanium.ui.widget; import java.util.ArrayList; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.util.Log; import org.appcelerator.titanium.util.TiConvert; import org.appcelerator.titanium.util.TiEventHelper; import org.appcelerator.titanium.view.TiCompositeLayout; import org.appcelerator.titanium.view.TiUIView; import org.appcelerator.titanium.view.TiCompositeLayout.LayoutParams; import ti.modules.titanium.ui.ScrollableViewProxy; import android.app.Activity; import android.content.Context; import android.os.Parcelable; import android.support.v4.view.PagerAdapter; import android.support.v4.view.ViewPager; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.View.OnClickListener; import android.widget.RelativeLayout; public class TiUIScrollableView extends TiUIView { private static final String TAG = "TiUIScrollableView"; private static final String PROPERTY_SHOW_PAGING_CONTROL = "showPagingControl"; private static final String PROPERTY_VIEWS = "views"; private static final String PROPERTY_CURRENT_PAGE = "currentPage"; private static final int PAGE_LEFT = 200; private static final int PAGE_RIGHT = 201; private final ViewPager mPager; private final ArrayList<TiViewProxy> mViews; private final ViewPagerAdapter mAdapter; private final TiCompositeLayout mContainer; private final RelativeLayout mPagingControl; private int mCurIndex = -1; private boolean mShowPagingControl = false; public TiUIScrollableView(ScrollableViewProxy proxy) { super(proxy); mViews = new ArrayList<TiViewProxy>(); mAdapter = new ViewPagerAdapter(proxy.getTiContext().getActivity(), mViews); mPager = buildViewPager(proxy.getContext(), mAdapter); mContainer = new TiViewPagerLayout(proxy.getContext()); mContainer.addView(mPager, buildFillLayoutParams()); mPagingControl = buildPagingControl(proxy.getContext()); mContainer.addView(mPagingControl, buildFillLayoutParams()); setNativeView(mContainer); } private ViewPager buildViewPager(Context context, ViewPagerAdapter adapter) { ViewPager pager = new ViewPager(context); pager.setAdapter(adapter); pager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() { @Override public void onPageSelected(int position) { super.onPageSelected(position); int oldIndex = mCurIndex; mCurIndex = position; if (mCurIndex >= 0) { if (oldIndex >=0 && oldIndex != mCurIndex && oldIndex < mViews.size()) { // Don't know what these focused and unfocused // events are good for, but they were in our previous // scrollable implementation. // cf. https://github.com/appcelerator/titanium_mobile/blob/20335d8603e2708b59a18bafbb91b7292278de8e/android/modules/ui/src/ti/modules/titanium/ui/widget/TiScrollableView.java#L260 TiEventHelper.fireFocused(mViews.get(oldIndex)); } TiEventHelper.fireUnfocused(mViews.get(mCurIndex)); if (oldIndex >= 0) { // oldIndex will be -1 if the view has just // been created and is setting currentPage // to something other than 0. In that case we // don't want a scroll to fire. 
((ScrollableViewProxy)proxy).fireScroll(mCurIndex); } } if (mShowPagingControl) { showPager(); } } }); return pager; } private TiCompositeLayout.LayoutParams buildFillLayoutParams() { TiCompositeLayout.LayoutParams params = new TiCompositeLayout.LayoutParams(); params.autoFillsHeight = true; params.autoFillsWidth = true; return params; } private RelativeLayout buildPagingControl(Context context) { RelativeLayout layout = new RelativeLayout(context); layout.setFocusable(false); layout.setFocusableInTouchMode(false); TiArrowView left = new TiArrowView(context); left.setVisibility(View.INVISIBLE); left.setId(PAGE_LEFT); left.setMinimumWidth(80); // TODO density? left.setMinimumHeight(80); left.setOnClickListener(new OnClickListener(){ public void onClick(View v) { movePrevious(); }}); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.ALIGN_PARENT_LEFT); params.addRule(RelativeLayout.CENTER_VERTICAL); layout.addView(left, params); TiArrowView right = new TiArrowView(context); right.setLeft(false); right.setVisibility(View.INVISIBLE); right.setId(PAGE_RIGHT); right.setMinimumWidth(80); // TODO density? right.setMinimumHeight(80); right.setOnClickListener(new OnClickListener(){ public void onClick(View v) { moveNext(); }}); params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT); params.addRule(RelativeLayout.CENTER_VERTICAL); layout.addView(right, params); layout.setVisibility(View.GONE); return layout; } @Override public void processProperties(KrollDict d) { if (d.containsKey(PROPERTY_VIEWS)) { setViews(d.get(PROPERTY_VIEWS)); } if (d.containsKey(PROPERTY_SHOW_PAGING_CONTROL)) { mShowPagingControl = TiConvert.toBoolean(d, PROPERTY_SHOW_PAGING_CONTROL); } if (d.containsKey(PROPERTY_CURRENT_PAGE)) { int page = TiConvert.toInt(d, PROPERTY_CURRENT_PAGE); if (page > 0) { setCurrentPage(page); } else { mCurIndex = 0; } } else { mCurIndex = 0; } super.processProperties(d); if (mShowPagingControl) { showPager(); } } @Override public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy) { if(PROPERTY_CURRENT_PAGE.equals(key)) { setCurrentPage(TiConvert.toInt(newValue)); } else { super.propertyChanged(key, oldValue, newValue, proxy); } } public void setShowPagingControl(boolean show) { mShowPagingControl = show; } public void addView(TiViewProxy proxy) { mViews.add(proxy); mAdapter.notifyDataSetChanged(); } public void removeView(TiViewProxy proxy) { if (mViews.contains(proxy)) { mViews.remove(proxy); mAdapter.notifyDataSetChanged(); } } public void showPager() { View v = null; v = mContainer.findViewById(PAGE_LEFT); if (v != null) { v.setVisibility(mCurIndex > 0 ? View.VISIBLE : View.INVISIBLE); } v = mContainer.findViewById(PAGE_RIGHT); if (v != null) { v.setVisibility(mCurIndex < (mViews.size() - 1) ? 
View.VISIBLE : View.INVISIBLE); } mPagingControl.setVisibility(View.VISIBLE); ((ScrollableViewProxy) proxy).setPagerTimeout(); } public void hidePager() { mPagingControl.setVisibility(View.INVISIBLE); } public void moveNext() { move(mCurIndex + 1); } public void movePrevious() { move(mCurIndex - 1); } private void move(int index) { if (index < 0 || index >= mViews.size()) { Log.w(TAG, "Request to move to index " + index+ " ignored, as it is out-of-bounds."); return; } mPager.setCurrentItem(index); } public void scrollTo(Object view) { if (view instanceof Number) { move(((Number) view).intValue()); } else if (view instanceof TiViewProxy) { move(mViews.indexOf(view)); } } public int getCurrentPage() { return mCurIndex; } public void setCurrentPage(Object view) { scrollTo(view); } private void clearViewsList() { if (mViews == null || mViews.size() == 0) { return; } for (TiViewProxy viewProxy : mViews) { viewProxy.releaseViews(); } mViews.clear(); } public void setViews(Object viewsObject) { boolean changed = false; clearViewsList(); if (viewsObject instanceof Object[]) { Object[] views = (Object[])viewsObject; for (int i = 0; i < views.length; i++) { if (views[i] instanceof TiViewProxy) { TiViewProxy tv = (TiViewProxy)views[i]; mViews.add(tv); changed = true; } } } if (changed) { mAdapter.notifyDataSetChanged(); } } public ArrayList<TiViewProxy> getViews() { return mViews; } @Override public void release() { if (mPager != null) { for (int i = mPager.getChildCount() - 1; i >= 0; i--) { mPager.removeViewAt(i); } } if (mViews != null) { for (TiViewProxy viewProxy : mViews) { viewProxy.releaseViews(); } mViews.clear(); } super.release(); } public static class ViewPagerAdapter extends PagerAdapter { private final Activity mActivity; private final ArrayList<TiViewProxy> mViewProxies; public ViewPagerAdapter(Activity activity, ArrayList<TiViewProxy> viewProxies) { mActivity = activity; mViewProxies = viewProxies; } @Override public void destroyItem(View container, int position, Object object) { ((ViewPager) container).removeView((View) object); if (position < mViewProxies.size()) { TiViewProxy proxy = mViewProxies.get(position); proxy.releaseViews(); } } @Override public void finishUpdate(View container) {} @Override public int getCount() { return mViewProxies.size(); } @Override public Object instantiateItem(View container, int position) { ViewPager pager = (ViewPager) container; TiViewProxy tiProxy = mViewProxies.get(position); TiUIView tiView = tiProxy.getView(mActivity); View view = tiView.getNativeView(); if (view.getParent() != null) { pager.removeView(view); } if (position < pager.getChildCount()) { pager.addView(view, position); } else { pager.addView(view); } return view; } @Override public boolean isViewFromObject(View view, Object obj) { return (obj instanceof View && view.equals(obj)); } @Override public void restoreState(Parcelable state, ClassLoader loader) {} @Override public Parcelable saveState() {return null;} @Override public void startUpdate(View container) {} @Override public int getItemPosition(Object object) { if (!mViewProxies.contains(object)) { return POSITION_NONE; } else { return POSITION_UNCHANGED; } } } public class TiViewPagerLayout extends TiCompositeLayout { public TiViewPagerLayout(Context context) { super(context); setFocusable(true); setFocusableInTouchMode(true); setDescendantFocusability(ViewGroup.FOCUS_AFTER_DESCENDANTS); } @Override public boolean onTrackballEvent(MotionEvent event) { // Any trackball activity should show the pager. 
if (mShowPagingControl && mPagingControl.getVisibility() != View.VISIBLE) { showPager(); } return super.onTrackballEvent(event); } @Override public boolean dispatchKeyEvent(KeyEvent event) { boolean handled = false; if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (event.getKeyCode()) { case KeyEvent.KEYCODE_DPAD_LEFT: { movePrevious(); handled = true; break; } case KeyEvent.KEYCODE_DPAD_RIGHT: { moveNext(); handled = true; break; } } } return handled || super.dispatchKeyEvent(event); } } }
android/modules/ui/src/ti/modules/titanium/ui/widget/TiUIScrollableView.java
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2011 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package ti.modules.titanium.ui.widget; import java.util.ArrayList; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.util.Log; import org.appcelerator.titanium.util.TiConvert; import org.appcelerator.titanium.util.TiEventHelper; import org.appcelerator.titanium.view.TiCompositeLayout; import org.appcelerator.titanium.view.TiUIView; import org.appcelerator.titanium.view.TiCompositeLayout.LayoutParams; import ti.modules.titanium.ui.ScrollableViewProxy; import android.app.Activity; import android.content.Context; import android.os.Parcelable; import android.support.v4.view.PagerAdapter; import android.support.v4.view.ViewPager; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.View.OnClickListener; import android.widget.RelativeLayout; public class TiUIScrollableView extends TiUIView { private static final String TAG = "TiUIScrollableView"; private static final String PROPERTY_SHOW_PAGING_CONTROL = "showPagingControl"; private static final String PROPERTY_VIEWS = "views"; private static final String PROPERTY_CURRENT_PAGE = "currentPage"; private static final int PAGE_LEFT = 200; private static final int PAGE_RIGHT = 201; private final ViewPager mPager; private final ArrayList<TiViewProxy> mViews; private final ViewPagerAdapter mAdapter; private final TiCompositeLayout mContainer; private final RelativeLayout mPagingControl; private int mCurIndex = -1; private boolean mShowPagingControl = false; public TiUIScrollableView(ScrollableViewProxy proxy) { super(proxy); mViews = new ArrayList<TiViewProxy>(); mAdapter = new ViewPagerAdapter(proxy.getTiContext().getActivity(), mViews); mPager = buildViewPager(proxy.getContext(), mAdapter); mContainer = new TiViewPagerLayout(proxy.getContext()); mContainer.addView(mPager, buildFillLayoutParams()); mPagingControl = buildPagingControl(proxy.getContext()); mContainer.addView(mPagingControl, buildFillLayoutParams()); setNativeView(mContainer); } private ViewPager buildViewPager(Context context, ViewPagerAdapter adapter) { ViewPager pager = new ViewPager(context); pager.setAdapter(adapter); pager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() { @Override public void onPageSelected(int position) { super.onPageSelected(position); int oldIndex = mCurIndex; mCurIndex = position; if (mCurIndex >= 0) { if (oldIndex >=0 && oldIndex != mCurIndex && oldIndex < mViews.size()) { // Don't know what these focused and unfocused // events are good for, but they were in our previous // scrollable implementation. // cf. https://github.com/appcelerator/titanium_mobile/blob/20335d8603e2708b59a18bafbb91b7292278de8e/android/modules/ui/src/ti/modules/titanium/ui/widget/TiScrollableView.java#L260 TiEventHelper.fireFocused(mViews.get(oldIndex)); } TiEventHelper.fireUnfocused(mViews.get(mCurIndex)); if (oldIndex >= 0) { // oldIndex will be -1 if the view has just // been created and is setting currentPage // to something other than 0. In that case we // don't want a scroll to fire. 
((ScrollableViewProxy)proxy).fireScroll(mCurIndex); } } if (mShowPagingControl) { showPager(); } } }); return pager; } private TiCompositeLayout.LayoutParams buildFillLayoutParams() { TiCompositeLayout.LayoutParams params = new TiCompositeLayout.LayoutParams(); params.autoFillsHeight = true; params.autoFillsWidth = true; return params; } private RelativeLayout buildPagingControl(Context context) { RelativeLayout layout = new RelativeLayout(context); layout = new RelativeLayout(proxy.getContext()); layout.setFocusable(false); layout.setFocusableInTouchMode(false); TiArrowView left = new TiArrowView(proxy.getContext()); left.setVisibility(View.INVISIBLE); left.setId(PAGE_LEFT); left.setMinimumWidth(80); // TODO density? left.setMinimumHeight(80); left.setOnClickListener(new OnClickListener(){ public void onClick(View v) { movePrevious(); }}); RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.ALIGN_PARENT_LEFT); params.addRule(RelativeLayout.CENTER_VERTICAL); layout.addView(left, params); TiArrowView right = new TiArrowView(proxy.getContext()); right.setLeft(false); right.setVisibility(View.INVISIBLE); right.setId(PAGE_RIGHT); right.setMinimumWidth(80); // TODO density? right.setMinimumHeight(80); right.setOnClickListener(new OnClickListener(){ public void onClick(View v) { moveNext(); }}); params = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.ALIGN_PARENT_RIGHT); params.addRule(RelativeLayout.CENTER_VERTICAL); layout.addView(right, params); layout.setVisibility(View.GONE); return layout; } @Override public void processProperties(KrollDict d) { if (d.containsKey(PROPERTY_VIEWS)) { setViews(d.get(PROPERTY_VIEWS)); } if (d.containsKey(PROPERTY_SHOW_PAGING_CONTROL)) { mShowPagingControl = TiConvert.toBoolean(d, PROPERTY_SHOW_PAGING_CONTROL); } if (d.containsKey(PROPERTY_CURRENT_PAGE)) { int page = TiConvert.toInt(d, PROPERTY_CURRENT_PAGE); if (page > 0) { setCurrentPage(page); } else { mCurIndex = 0; } } else { mCurIndex = 0; } super.processProperties(d); if (mShowPagingControl) { showPager(); } } @Override public void propertyChanged(String key, Object oldValue, Object newValue, KrollProxy proxy) { if(PROPERTY_CURRENT_PAGE.equals(key)) { setCurrentPage(TiConvert.toInt(newValue)); } else { super.propertyChanged(key, oldValue, newValue, proxy); } } public void setShowPagingControl(boolean show) { mShowPagingControl = show; } public void addView(TiViewProxy proxy) { mViews.add(proxy); mAdapter.notifyDataSetChanged(); } public void removeView(TiViewProxy proxy) { if (mViews.contains(proxy)) { mViews.remove(proxy); mAdapter.notifyDataSetChanged(); } } public void showPager() { View v = null; v = mContainer.findViewById(PAGE_LEFT); if (v != null) { v.setVisibility(mCurIndex > 0 ? View.VISIBLE : View.INVISIBLE); } v = mContainer.findViewById(PAGE_RIGHT); if (v != null) { v.setVisibility(mCurIndex < (mViews.size() - 1) ? 
View.VISIBLE : View.INVISIBLE); } mPagingControl.setVisibility(View.VISIBLE); ((ScrollableViewProxy) proxy).setPagerTimeout(); } public void hidePager() { mPagingControl.setVisibility(View.INVISIBLE); } public void moveNext() { move(mCurIndex + 1); } public void movePrevious() { move(mCurIndex - 1); } private void move(int index) { if (index < 0 || index >= mViews.size()) { Log.w(TAG, "Request to move to index " + index+ " ignored, as it is out-of-bounds."); return; } mPager.setCurrentItem(index); } public void scrollTo(Object view) { if (view instanceof Number) { move(((Number) view).intValue()); } else if (view instanceof TiViewProxy) { move(mViews.indexOf(view)); } } public int getCurrentPage() { return mCurIndex; } public void setCurrentPage(Object view) { scrollTo(view); } private void clearViewsList() { if (mViews == null || mViews.size() == 0) { return; } for (TiViewProxy viewProxy : mViews) { viewProxy.releaseViews(); } mViews.clear(); } public void setViews(Object viewsObject) { boolean changed = false; clearViewsList(); if (viewsObject instanceof Object[]) { Object[] views = (Object[])viewsObject; for (int i = 0; i < views.length; i++) { if (views[i] instanceof TiViewProxy) { TiViewProxy tv = (TiViewProxy)views[i]; mViews.add(tv); changed = true; } } } if (changed) { mAdapter.notifyDataSetChanged(); } } public ArrayList<TiViewProxy> getViews() { return mViews; } @Override public void release() { if (mPager != null) { for (int i = mPager.getChildCount() - 1; i >= 0; i--) { mPager.removeViewAt(i); } } if (mViews != null) { for (TiViewProxy viewProxy : mViews) { viewProxy.releaseViews(); } mViews.clear(); } super.release(); } public static class ViewPagerAdapter extends PagerAdapter { private final Activity mActivity; private final ArrayList<TiViewProxy> mViewProxies; public ViewPagerAdapter(Activity activity, ArrayList<TiViewProxy> viewProxies) { mActivity = activity; mViewProxies = viewProxies; } @Override public void destroyItem(View container, int position, Object object) { ((ViewPager) container).removeView((View) object); if (position < mViewProxies.size()) { TiViewProxy proxy = mViewProxies.get(position); proxy.releaseViews(); } } @Override public void finishUpdate(View container) {} @Override public int getCount() { return mViewProxies.size(); } @Override public Object instantiateItem(View container, int position) { ViewPager pager = (ViewPager) container; TiViewProxy tiProxy = mViewProxies.get(position); TiUIView tiView = tiProxy.getView(mActivity); View view = tiView.getNativeView(); if (view.getParent() != null) { pager.removeView(view); } if (position < pager.getChildCount()) { pager.addView(view, position); } else { pager.addView(view); } return view; } @Override public boolean isViewFromObject(View view, Object obj) { return (obj instanceof View && view.equals(obj)); } @Override public void restoreState(Parcelable state, ClassLoader loader) {} @Override public Parcelable saveState() {return null;} @Override public void startUpdate(View container) {} @Override public int getItemPosition(Object object) { if (!mViewProxies.contains(object)) { return POSITION_NONE; } else { return POSITION_UNCHANGED; } } } public class TiViewPagerLayout extends TiCompositeLayout { public TiViewPagerLayout(Context context) { super(context); setFocusable(true); setFocusableInTouchMode(true); setDescendantFocusability(ViewGroup.FOCUS_AFTER_DESCENDANTS); } @Override public boolean onTrackballEvent(MotionEvent event) { // Any trackball activity should show the pager. 
if (mShowPagingControl && mPagingControl.getVisibility() != View.VISIBLE) { showPager(); } return super.onTrackballEvent(event); } @Override public boolean dispatchKeyEvent(KeyEvent event) { boolean handled = false; if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (event.getKeyCode()) { case KeyEvent.KEYCODE_DPAD_LEFT: { movePrevious(); handled = true; break; } case KeyEvent.KEYCODE_DPAD_RIGHT: { moveNext(); handled = true; break; } } } return handled || super.dispatchKeyEvent(event); } } }
TIMOB-5169 Remove double construction of layout.
android/modules/ui/src/ti/modules/titanium/ui/widget/TiUIScrollableView.java
TIMOB-5169 Remove double construction of layout.
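The diff captured in the record above is small: buildPagingControl used to create one RelativeLayout from the context argument and then immediately overwrite it with a second instance built from proxy.getContext(), and the fix keeps only the single construction. A condensed sketch of that idea, assuming an Android build environment and with the helper class name invented for illustration:

import android.content.Context;
import android.view.View;
import android.widget.RelativeLayout;

/**
 * Sketch of the TIMOB-5169 fix: build the paging-control layout once from the
 * context that was passed in, instead of constructing a throwaway instance and
 * immediately overwriting it with a second one.
 */
final class PagingControlFactory {

    private PagingControlFactory() {
    }

    static RelativeLayout buildPagingControl(Context context) {
        // Before the fix the method effectively did:
        //   RelativeLayout layout = new RelativeLayout(context);
        //   layout = new RelativeLayout(proxy.getContext());   // first instance discarded
        // After the fix there is a single construction from the supplied context.
        RelativeLayout layout = new RelativeLayout(context);
        layout.setFocusable(false);
        layout.setFocusableInTouchMode(false);
        layout.setVisibility(View.GONE);
        return layout;
    }
}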
Java
apache-2.0
6776cc80125875e4e8011a9b8c202029ea172a4f
0
poorya-abbasi/Gonnect
app/src/main/java/org/marlik/innovelopers/gonnect/FullResponseStructure.java
package org.marlik.innovelopers.gonnect; import okhttp3.Headers; /** * Created by pabbasi on 5/15/17. */ public class FullResponseStructure { String body; Headers headers; }
Delete FullResponseStructure.java
app/src/main/java/org/marlik/innovelopers/gonnect/FullResponseStructure.java
Delete FullResponseStructure.java
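The record above only removes a small holder class that paired a response body String with its okhttp3 Headers; the dataset does not say what, if anything, replaced it. One hypothetical option, sketched here purely for illustration (class name and URL are made up), is for callers to read both values straight from OkHttp's own Response:

import java.io.IOException;

import okhttp3.Headers;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;

/**
 * Hypothetical sketch only: reads the headers and body directly from the
 * okhttp3 Response, which is the same information the deleted holder carried.
 */
public class DirectResponseAccessSketch {

    public static void main(String[] args) throws IOException {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url("https://example.com/")   // placeholder URL for the sketch
                .build();

        try (Response response = client.newCall(request).execute()) {
            Headers headers = response.headers();              // what the Headers field held
            ResponseBody responseBody = response.body();
            String body = responseBody != null ? responseBody.string() : "";  // what the body field held
            System.out.println(headers);
            System.out.println(body.length() + " characters of body");
        }
    }
}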
Java
apache-2.0
5f68ae23f0517f149f63346ca3292ff28182bccd
0
steinarb/ukelonn,steinarb/ukelonn,steinarb/ukelonn
package no.priv.bang.ukelonn.impl; import static no.priv.bang.ukelonn.impl.CommonDatabaseMethods.*; import java.net.URI; import java.security.Principal; import java.util.List; import java.util.Map; import com.vaadin.annotations.Theme; import com.vaadin.data.Property.ValueChangeEvent; import com.vaadin.data.Property.ValueChangeListener; import com.vaadin.data.util.BeanItemContainer; import com.vaadin.data.util.ObjectProperty; import com.vaadin.server.Responsive; import com.vaadin.server.VaadinRequest; import com.vaadin.ui.Accordion; import com.vaadin.ui.Button; import com.vaadin.ui.ComboBox; import com.vaadin.ui.Component; import com.vaadin.ui.FormLayout; import com.vaadin.ui.Label; import com.vaadin.ui.Table; import com.vaadin.ui.TextField; import com.vaadin.ui.VerticalLayout; import com.vaadin.ui.AbstractSelect.ItemCaptionMode; import com.vaadin.ui.Button.ClickEvent; @Theme("chameleon") public class UkelonnAdminUI extends AbstractUI { private static final long serialVersionUID = -1581589472749242129L; final int idOfPayToBank = 4; @Override protected void init(VaadinRequest request) { if (!isAdministrator()) { URI userPage = addPathToURI(getPage().getLocation(), "../user/"); getPage().setLocation(userPage); } VerticalLayout content = new VerticalLayout(); content.addStyleName("ukelonn-responsive-layout"); Responsive.makeResponsive(content); Principal currentUser = request.getUserPrincipal(); AdminUser admin = getAdminUserFromDatabase(getClass(), (String) currentUser.getName()); // Display the greeting Component greeting = new Label("Hei " + admin.getFirstname()); greeting.setStyleName("h1"); content.addComponent(greeting); // Updatable containers ObjectProperty<Double> balance = new ObjectProperty<Double>(0.0); BeanItemContainer<Transaction> recentJobs = new BeanItemContainer<Transaction>(Transaction.class); BeanItemContainer<Transaction> recentPayments = new BeanItemContainer<Transaction>(Transaction.class); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(getClass()); BeanItemContainer<TransactionType> paymentTypes = new BeanItemContainer<TransactionType>(TransactionType.class, getPaymentTypesFromTransactionTypes(transactionTypes.values())); BeanItemContainer<TransactionType> jobTypes = new BeanItemContainer<TransactionType>(TransactionType.class, getJobTypesFromTransactionTypes(transactionTypes.values())); ComboBox paymenttype = new ComboBox("Registrer utbetaling", paymentTypes); ObjectProperty<Double> amount = new ObjectProperty<Double>(0.0); Class<? 
extends UkelonnAdminUI> classForLogMessage = getClass(); Accordion accordion = new Accordion(); VerticalLayout registerPaymentTab = new VerticalLayout(); List<Account> accounts = getAccounts(getClass()); BeanItemContainer<Account> accountsContainer = new BeanItemContainer<Account>(Account.class, accounts); ComboBox accountSelector = new ComboBox("Velg hvem det skal betales til", accountsContainer); accountSelector.setItemCaptionMode(ItemCaptionMode.PROPERTY); accountSelector.setItemCaptionPropertyId("fullName"); accountSelector.addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -781514357123503476L; @Override public void valueChange(ValueChangeEvent event) { Account account = (Account) accountSelector.getValue(); jobTypes.removeAllItems(); paymentTypes.removeAllItems(); recentJobs.removeAllItems(); recentPayments.removeAllItems(); if (account != null) { refreshAccount(classForLogMessage, account); balance.setValue(account.getBalance()); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(classForLogMessage); jobTypes.addAll(getJobTypesFromTransactionTypes(transactionTypes.values())); paymentTypes.addAll(getPaymentTypesFromTransactionTypes(transactionTypes.values())); paymenttype.select(transactionTypes.get(idOfPayToBank)); amount.setValue(balance.getValue()); recentJobs.addAll(getJobsFromAccount(account, classForLogMessage)); recentPayments.addAll(getPaymentsFromAccount(account, classForLogMessage)); } } }); registerPaymentTab.addComponent(accountSelector); FormLayout paymentLayout = new FormLayout(); TextField balanceDisplay = new TextField("Til gode:"); balanceDisplay.setPropertyDataSource(balance); balanceDisplay.addStyleName("inline-label"); paymentLayout.addComponent(balanceDisplay); paymenttype.setItemCaptionMode(ItemCaptionMode.PROPERTY); paymenttype.setItemCaptionPropertyId("transactionTypeName"); paymenttype.addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -8306551057458139402L; @Override public void valueChange(ValueChangeEvent event) { TransactionType payment = (TransactionType) paymenttype.getValue(); if (payment != null) { Double paymentAmount = payment.getTransactionAmount(); if (payment.getId() == idOfPayToBank || paymentAmount != null) { amount.setValue(balance.getValue()); } else { amount.setValue(paymentAmount); } } } }); paymentLayout.addComponent(paymenttype); TextField amountField = new TextField("Beløp:", amount); paymentLayout.addComponent(amountField); paymentLayout.addComponent(new Button("Registrer betaling", new Button.ClickListener() { private static final long serialVersionUID = 5260321175219218136L; @Override public void buttonClick(ClickEvent event) { Account account = (Account) accountSelector.getValue(); TransactionType payment = (TransactionType) paymenttype.getValue(); if (account != null && payment != null) { addNewPaymentToAccount(classForLogMessage, account, payment, amount.getValue()); recentPayments.removeAllItems(); recentPayments.addAll(getPaymentsFromAccount(account, classForLogMessage)); refreshAccount(classForLogMessage, account); balance.setValue(account.getBalance()); amount.setValue(0.0); } } })); registerPaymentTab.addComponent(paymentLayout); Accordion userinfo = new Accordion(); VerticalLayout jobsTab = new VerticalLayout(); Table lastJobsTable = createTransactionTable("Jobbtype", recentJobs); jobsTab.addComponent(lastJobsTable); userinfo.addTab(jobsTab, "Siste jobber"); VerticalLayout paymentsTab = new 
VerticalLayout(); Table lastPaymentsTable = createTransactionTable("Type utbetaling", recentPayments); paymentsTab.addComponent(lastPaymentsTable); userinfo.addTab(paymentsTab, "Siste utbetalinger"); registerPaymentTab.addComponent(userinfo); accordion.addTab(registerPaymentTab, "Registrere utbetaling"); // Updatable data model for the form elements (setting values in the properties will update the fields) ObjectProperty<String> newJobTypeName = new ObjectProperty<String>(""); ObjectProperty<Double> newJobTypeAmount = new ObjectProperty<Double>(0.0); ObjectProperty<String> editedJobTypeName = new ObjectProperty<String>(""); ObjectProperty<Double> editedJobTypeAmount = new ObjectProperty<Double>(0.0); ObjectProperty<String> newPaymentTypeName = new ObjectProperty<String>(""); ObjectProperty<Double> newPaymentTypeAmount = new ObjectProperty<Double>(0.0); VerticalLayout jobtypeAdminTab = new VerticalLayout(); Accordion jobtypes = new Accordion(); FormLayout newJobTypeTab = new FormLayout(); TextField newJobTypeNameField = new TextField("Navn på ny jobbtype:", newJobTypeName); newJobTypeTab.addComponent(newJobTypeNameField); TextField newJobTypeAmountField = new TextField("Beløp for ny jobbtype:", newJobTypeAmount); newJobTypeTab.addComponent(newJobTypeAmountField); newJobTypeTab.addComponent(new Button("Lag jobbtype", new Button.ClickListener() { private static final long serialVersionUID = 1338062460936195627L; @Override public void buttonClick(ClickEvent event) { String jobname = newJobTypeName.getValue(); Double jobamount = newJobTypeAmount.getValue(); if (!"".equals(jobname) && !Double.valueOf(0.0).equals(jobamount)) { addJobTypeToDatabase(classForLogMessage, jobname, jobamount); newJobTypeName.setValue(""); newJobTypeAmount.setValue(0.0); } } })); jobtypes.addTab(newJobTypeTab, "Lag ny jobbtype"); VerticalLayout jobtypesform = new VerticalLayout(); Table jobtypesTable = new Table(); jobtypesTable.addContainerProperty("transactionTypeName", String.class, null, "Navn", null, null); jobtypesTable.addContainerProperty("transactionAmount", Double.class, null, "Beløp", null, null); jobtypesTable.setContainerDataSource(jobTypes); jobtypesTable.setVisibleColumns("transactionTypeName", "transactionAmount"); jobtypesTable.setSelectable(true); jobtypesTable.addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -8324617275480799162L; @Override public void valueChange(ValueChangeEvent event) { TransactionType transactionType = (TransactionType) jobtypesTable.getValue(); if (transactionType != null) { editedJobTypeName.setValue(transactionType.getTransactionTypeName()); editedJobTypeAmount.setValue(transactionType.getTransactionAmount()); } } }); jobtypesform.addComponent(jobtypesTable); FormLayout editJobLayout = new FormLayout(); TextField editJobTypeNameField = new TextField("Endre Navn på jobbtype:", editedJobTypeName); editJobLayout.addComponent(editJobTypeNameField); TextField editJobTypeAmountField = new TextField("Endre beløp for jobbtype:", editedJobTypeAmount); editJobLayout.addComponent(editJobTypeAmountField); editJobLayout.addComponent(new Button("Lagre endringer i jobbtype", new Button.ClickListener() { private static final long serialVersionUID = 347708021528799659L; @Override public void buttonClick(ClickEvent event) { TransactionType transactionType = (TransactionType) jobtypesTable.getValue(); if (transactionType != null) { if (!"".equals(editJobTypeNameField.getValue()) && !identicalToExistingValues(transactionType, editedJobTypeName, 
editedJobTypeAmount)) { transactionType.setTransactionTypeName(editedJobTypeName.getValue()); transactionType.setTransactionAmount(editedJobTypeAmount.getValue()); updateTransactionTypeInDatabase(classForLogMessage, transactionType); jobtypesTable.setValue(null); editedJobTypeName.setValue(""); editedJobTypeAmount.setValue(0.0); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(classForLogMessage); jobTypes.removeAllItems(); jobTypes.addAll(getJobTypesFromTransactionTypes(transactionTypes.values())); } } } private boolean identicalToExistingValues(TransactionType transactionType, ObjectProperty<String> transactionTypeName, ObjectProperty<Double> transactionTypeAmount) { if (transactionType == null || transactionType.getTransactionTypeName() == null || transactionType.getTransactionAmount() == null) { return false; // Nothing to compare against, always false } boolean isIdentical = transactionType.getTransactionTypeName().equals(transactionTypeName.getValue()) && transactionType.getTransactionAmount().equals(transactionTypeAmount.getValue()); return isIdentical; } })); jobtypesform.addComponent(editJobLayout); jobtypes.addTab(jobtypesform, "Endre jobbtyper"); jobtypeAdminTab.addComponent(jobtypes); accordion.addTab(jobtypeAdminTab, "Administrere jobbtyper"); VerticalLayout paymentstypeadminTab = new VerticalLayout(); Accordion paymentstypeadmin = new Accordion(); FormLayout newpaymenttypeTab = new FormLayout(); TextField newPaymentTypeNameField = new TextField("Navn på ny betalingstype:", newPaymentTypeName); newpaymenttypeTab.addComponent(newPaymentTypeNameField); TextField newPaymentTypeAmountField = new TextField("Beløp for ny betalingstype:", newPaymentTypeAmount); newpaymenttypeTab.addComponent(newPaymentTypeAmountField); newpaymenttypeTab.addComponent(new Button("Lag betalingstype", new Button.ClickListener() { private static final long serialVersionUID = -2160144195348196823L; @Override public void buttonClick(ClickEvent event) { String paymentName = newPaymentTypeName.getValue(); Double paymentAmount = newPaymentTypeAmount.getValue(); if (!"".equals(paymentName) && !Double.valueOf(0.0).equals(paymentAmount)) { addPaymentTypeToDatabase(classForLogMessage, paymentName, paymentAmount); newPaymentTypeName.setValue(""); newPaymentTypeAmount.setValue(0.0); } } })); paymentstypeadmin.addTab(newpaymenttypeTab, "Lag ny utbetalingstype"); VerticalLayout paymenttypesform = new VerticalLayout(); paymentstypeadmin.addTab(paymenttypesform, "Endre utbetalingstyper"); paymentstypeadminTab.addComponent(paymentstypeadmin); accordion.addTab(paymentstypeadminTab, "Administrere utbetalingstyper"); VerticalLayout useradminTab = new VerticalLayout(); Accordion useradmin = new Accordion(); VerticalLayout newuserTab = new VerticalLayout(); useradmin.addTab(newuserTab, "Legg til ny bruker"); VerticalLayout changeuserpasswordTab = new VerticalLayout(); useradmin.addTab(changeuserpasswordTab, "Bytt passord på bruker"); VerticalLayout usersTab = new VerticalLayout(); useradmin.addTab(usersTab, "Endre brukere"); useradminTab.addComponent(useradmin); accordion.addTab(useradminTab, "Administrere brukere"); content.addComponent(accordion); setContent(content); } }
ukelonn.bundle/src/main/java/no/priv/bang/ukelonn/impl/UkelonnAdminUI.java
package no.priv.bang.ukelonn.impl; import static no.priv.bang.ukelonn.impl.CommonDatabaseMethods.*; import java.net.URI; import java.security.Principal; import java.util.List; import java.util.Map; import com.vaadin.annotations.Theme; import com.vaadin.data.Property.ValueChangeEvent; import com.vaadin.data.Property.ValueChangeListener; import com.vaadin.data.util.BeanItemContainer; import com.vaadin.data.util.ObjectProperty; import com.vaadin.server.Responsive; import com.vaadin.server.VaadinRequest; import com.vaadin.ui.Accordion; import com.vaadin.ui.Button; import com.vaadin.ui.ComboBox; import com.vaadin.ui.Component; import com.vaadin.ui.FormLayout; import com.vaadin.ui.Label; import com.vaadin.ui.Table; import com.vaadin.ui.TextField; import com.vaadin.ui.VerticalLayout; import com.vaadin.ui.AbstractSelect.ItemCaptionMode; import com.vaadin.ui.Button.ClickEvent; @Theme("chameleon") public class UkelonnAdminUI extends AbstractUI { private static final long serialVersionUID = -1581589472749242129L; @SuppressWarnings("serial") @Override protected void init(VaadinRequest request) { if (!isAdministrator()) { URI userPage = addPathToURI(getPage().getLocation(), "../user/"); getPage().setLocation(userPage); } VerticalLayout content = new VerticalLayout(); content.addStyleName("ukelonn-responsive-layout"); Responsive.makeResponsive(content); Principal currentUser = request.getUserPrincipal(); AdminUser admin = getAdminUserFromDatabase(getClass(), (String) currentUser.getName()); // Display the greeting Component greeting = new Label("Hei " + admin.getFirstname()); greeting.setStyleName("h1"); content.addComponent(greeting); // Updatable containers ObjectProperty<Double> balance = new ObjectProperty<Double>(0.0); BeanItemContainer<Transaction> recentJobs = new BeanItemContainer<Transaction>(Transaction.class); BeanItemContainer<Transaction> recentPayments = new BeanItemContainer<Transaction>(Transaction.class); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(getClass()); BeanItemContainer<TransactionType> paymentTypes = new BeanItemContainer<TransactionType>(TransactionType.class, getPaymentTypesFromTransactionTypes(transactionTypes.values())); BeanItemContainer<TransactionType> jobTypes = new BeanItemContainer<TransactionType>(TransactionType.class, getJobTypesFromTransactionTypes(transactionTypes.values())); ComboBox paymenttype = new ComboBox("Registrer utbetaling", paymentTypes); ObjectProperty<Double> amount = new ObjectProperty<Double>(0.0); Class<? 
extends UkelonnAdminUI> classForLogMessage = getClass(); Accordion accordion = new Accordion(); VerticalLayout registerPaymentTab = new VerticalLayout(); List<Account> accounts = getAccounts(getClass()); BeanItemContainer<Account> accountsContainer = new BeanItemContainer<Account>(Account.class, accounts); ComboBox accountSelector = new ComboBox("Velg hvem det skal betales til", accountsContainer); accountSelector.setItemCaptionMode(ItemCaptionMode.PROPERTY); accountSelector.setItemCaptionPropertyId("fullName"); accountSelector.addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -781514357123503476L; @Override public void valueChange(ValueChangeEvent event) { Account account = (Account) accountSelector.getValue(); jobTypes.removeAllItems(); paymentTypes.removeAllItems(); recentJobs.removeAllItems(); recentPayments.removeAllItems(); if (account != null) { refreshAccount(classForLogMessage, account); balance.setValue(account.getBalance()); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(classForLogMessage); jobTypes.addAll(getJobTypesFromTransactionTypes(transactionTypes.values())); paymentTypes.addAll(getPaymentTypesFromTransactionTypes(transactionTypes.values())); final int idOfPayToBank = 4; paymenttype.select(transactionTypes.get(idOfPayToBank)); amount.setValue(balance.getValue()); recentJobs.addAll(getJobsFromAccount(account, classForLogMessage)); recentPayments.addAll(getPaymentsFromAccount(account, classForLogMessage)); } } }); registerPaymentTab.addComponent(accountSelector); FormLayout paymentLayout = new FormLayout(); TextField balanceDisplay = new TextField("Til gode:"); balanceDisplay.setPropertyDataSource(balance); balanceDisplay.addStyleName("inline-label"); paymentLayout.addComponent(balanceDisplay); paymenttype.setItemCaptionMode(ItemCaptionMode.PROPERTY); paymenttype.setItemCaptionPropertyId("transactionTypeName"); paymentLayout.addComponent(paymenttype); TextField amountField = new TextField("Beløp:", amount); paymentLayout.addComponent(amountField); paymentLayout.addComponent(new Button("Registrer betaling", new Button.ClickListener() { private static final long serialVersionUID = 5260321175219218136L; @Override public void buttonClick(ClickEvent event) { Account account = (Account) accountSelector.getValue(); TransactionType payment = (TransactionType) paymenttype.getValue(); if (account != null && payment != null) { addNewPaymentToAccount(classForLogMessage, account, payment, amount.getValue()); recentPayments.removeAllItems(); recentPayments.addAll(getPaymentsFromAccount(account, classForLogMessage)); refreshAccount(classForLogMessage, account); balance.setValue(account.getBalance()); amount.setValue(0.0); } } })); registerPaymentTab.addComponent(paymentLayout); Accordion userinfo = new Accordion(); VerticalLayout jobsTab = new VerticalLayout(); Table lastJobsTable = createTransactionTable("Jobbtype", recentJobs); jobsTab.addComponent(lastJobsTable); userinfo.addTab(jobsTab, "Siste jobber"); VerticalLayout paymentsTab = new VerticalLayout(); Table lastPaymentsTable = createTransactionTable("Type utbetaling", recentPayments); paymentsTab.addComponent(lastPaymentsTable); userinfo.addTab(paymentsTab, "Siste utbetalinger"); registerPaymentTab.addComponent(userinfo); accordion.addTab(registerPaymentTab, "Registrere utbetaling"); VerticalLayout jobtypeAdminTab = new VerticalLayout(); Accordion jobtypes = new Accordion(); FormLayout newJobTypeTab = new FormLayout(); ObjectProperty<String> 
newJobTypeName = new ObjectProperty<String>(""); ObjectProperty<Double> newJobTypeAmount = new ObjectProperty<Double>(0.0); ObjectProperty<String> editedJobTypeName = new ObjectProperty<String>(""); ObjectProperty<Double> editedJobTypeAmount = new ObjectProperty<Double>(0.0); TextField newJobTypeNameField = new TextField("Navn på ny jobbtype:", newJobTypeName); newJobTypeTab.addComponent(newJobTypeNameField); TextField newJobTypeAmountField = new TextField("Navn på ny jobbtype:", newJobTypeAmount); newJobTypeTab.addComponent(newJobTypeAmountField); newJobTypeTab.addComponent(new Button("Lag jobbtype", new Button.ClickListener() { private static final long serialVersionUID = 1338062460936195627L; @Override public void buttonClick(ClickEvent event) { String jobname = newJobTypeName.getValue(); Double jobamount = newJobTypeAmount.getValue(); if (!"".equals(jobname) && !Double.valueOf(0.0).equals(jobamount)) { addJobTypeToDatabase(classForLogMessage, jobname, jobamount); newJobTypeName.setValue(""); newJobTypeAmount.setValue(0.0); } } })); jobtypes.addTab(newJobTypeTab, "Lag ny jobbtype"); VerticalLayout jobtypesform = new VerticalLayout(); Table jobtypesTable = new Table(); jobtypesTable.addContainerProperty("transactionTypeName", String.class, null, "Navn", null, null); jobtypesTable.addContainerProperty("transactionAmount", Double.class, null, "Beløp", null, null); jobtypesTable.setContainerDataSource(jobTypes); jobtypesTable.setVisibleColumns("transactionTypeName", "transactionAmount"); jobtypesTable.setSelectable(true); jobtypesTable.addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -8324617275480799162L; @Override public void valueChange(ValueChangeEvent event) { TransactionType transactionType = (TransactionType) jobtypesTable.getValue(); if (transactionType != null) { editedJobTypeName.setValue(transactionType.getTransactionTypeName()); editedJobTypeAmount.setValue(transactionType.getTransactionAmount()); } } }); jobtypesform.addComponent(jobtypesTable); FormLayout editJobLayout = new FormLayout(); TextField editJobTypeNameField = new TextField("Endre Navn på jobbtype:", editedJobTypeName); editJobLayout.addComponent(editJobTypeNameField); TextField editJobTypeAmountField = new TextField("Endre på ny jobbtype:", editedJobTypeAmount); editJobLayout.addComponent(editJobTypeAmountField); editJobLayout.addComponent(new Button("Lagre endringer i jobbtype", new Button.ClickListener() { @Override public void buttonClick(ClickEvent event) { TransactionType transactionType = (TransactionType) jobtypesTable.getValue(); if (transactionType != null) { if (!"".equals(editJobTypeNameField.getValue()) && !identicalToExistingValues(transactionType, editedJobTypeName, editedJobTypeAmount)) { transactionType.setTransactionTypeName(editedJobTypeName.getValue()); transactionType.setTransactionAmount(editedJobTypeAmount.getValue()); updateTransactionTypeInDatabase(classForLogMessage, transactionType); jobtypesTable.setValue(null); editedJobTypeName.setValue(""); editedJobTypeAmount.setValue(0.0); Map<Integer, TransactionType> transactionTypes = getTransactionTypesFromUkelonnDatabase(classForLogMessage); jobTypes.removeAllItems(); jobTypes.addAll(getJobTypesFromTransactionTypes(transactionTypes.values())); } } } private boolean identicalToExistingValues(TransactionType transactionType, ObjectProperty<String> transactionTypeName, ObjectProperty<Double> transactionTypeAmount) { if (transactionType == null || transactionType.getTransactionTypeName() == null || 
transactionType.getTransactionAmount() == null) { return false; // Nothing to compare against, always false } boolean isIdentical = transactionType.getTransactionTypeName().equals(transactionTypeName.getValue()) && transactionType.getTransactionAmount().equals(transactionTypeAmount.getValue()); return isIdentical; } })); jobtypesform.addComponent(editJobLayout); jobtypes.addTab(jobtypesform, "Endre jobbtyper"); jobtypeAdminTab.addComponent(jobtypes); accordion.addTab(jobtypeAdminTab, "Administrere jobbtyper"); VerticalLayout paymentstypeadminTab = new VerticalLayout(); Accordion paymentstypeadmin = new Accordion(); VerticalLayout newpaymenttypeTab = new VerticalLayout(); paymentstypeadmin.addTab(newpaymenttypeTab, "Lag ny utbetalingstype"); VerticalLayout paymenttypesform = new VerticalLayout(); paymentstypeadmin.addTab(paymenttypesform, "Endre utbetalingstyper"); paymentstypeadminTab.addComponent(paymentstypeadmin); accordion.addTab(paymentstypeadminTab, "Endre utbetalingstyper"); VerticalLayout useradminTab = new VerticalLayout(); Accordion useradmin = new Accordion(); VerticalLayout newuserTab = new VerticalLayout(); useradmin.addTab(newuserTab, "Legg til ny bruker"); VerticalLayout changeuserpasswordTab = new VerticalLayout(); useradmin.addTab(changeuserpasswordTab, "Bytt passord på bruker"); VerticalLayout usersTab = new VerticalLayout(); useradmin.addTab(usersTab, "Endre brukere"); useradminTab.addComponent(useradmin); accordion.addTab(useradminTab, "Administrere brukere"); content.addComponent(accordion); setContent(content); } }
Implemented adding a new payment type in the admin UI. Also added a listener that sets the payment amount when the selected payment type is something other than payment to bank (for which the default is to pay out the entire balance). Bug: the payment type combobox listener is never called, even though the syntax is identical to other combobox value listeners that are called.
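A possible starting point for the combobox bug described above, only as a hedged sketch and not a confirmed diagnosis: in Vaadin 7, a field that is not in immediate mode sends its value change with the next server round trip rather than right away, so a ValueChangeListener can look like it never fires. The fragment below would live inside the init(VaadinRequest) method shown above; paymenttype, amount and TransactionType come from that code, while the listener body and serialVersionUID value are illustrative.

    // Sketch only: make the combobox push value changes to the server immediately,
    // then pre-fill the amount from the selected payment type.
    paymenttype.setImmediate(true);
    paymenttype.addValueChangeListener(new ValueChangeListener() {
            private static final long serialVersionUID = 1L; // illustrative value

            @Override
            public void valueChange(ValueChangeEvent event) {
                TransactionType payment = (TransactionType) paymenttype.getValue();
                if (payment != null && payment.getTransactionAmount() != null) {
                    // For payment types other than "pay to bank" (which defaults to the
                    // full balance), suggest the amount configured on the payment type.
                    amount.setValue(payment.getTransactionAmount());
                }
            }
        });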
ukelonn.bundle/src/main/java/no/priv/bang/ukelonn/impl/UkelonnAdminUI.java
Implemented adding a new payment type in the admin UI.
Java
apache-2.0
313565cdfa9d1f6b9f57299ad83867b70948beca
0
enioka/jqm,enioka/jqm,enioka/jqm,enioka/jqm,enioka/jqm
/** * Copyright © 2013 enioka. All rights reserved * Authors: Pierre COPPEE ([email protected]) * Contributors : Marc-Antoine GOUILLART ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.enioka.jqm.tools; import java.net.URL; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import com.enioka.jqm.jpamodel.Queue; class ThreadPool { private static Logger jqmlogger = Logger.getLogger(ThreadPool.class); private Queue queue = null; private int nbThread = 0; private ExecutorService pool = null; private Map<String, URL[]> cache = null; ThreadPool(Queue queue, int n, Map<String, URL[]> cache) { this.queue = queue; this.cache = cache; nbThread = n; pool = Executors.newFixedThreadPool(nbThread); } void run(com.enioka.jqm.jpamodel.JobInstance ji, Polling p, boolean stop) { jqmlogger.info("Job instance will be inserted inside a thread pool: " + ji.getId()); jqmlogger.debug("ThreadPool ActualNbThread: " + p.getActualNbThread()); if (stop) { System.exit(0); } else { pool.submit(new Loader(ji, cache, p)); } } Queue getQueue() { return queue; } int getNbThread() { return nbThread; } }
serious/JobBaseAPI/src/main/java/com/enioka/jqm/tools/ThreadPool.java
/** * Copyright © 2013 enioka. All rights reserved * Authors: Pierre COPPEE ([email protected]) * Contributors : Marc-Antoine GOUILLART ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.enioka.jqm.tools; import java.net.URL; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.apache.log4j.Logger; import com.enioka.jqm.jpamodel.Queue; class ThreadPool { private static Logger jqmlogger = Logger.getLogger(ThreadPool.class); private Queue queue = null; private int nbThread = 0; private ExecutorService pool = null; private Map<String, URL[]> cache = null; private JqmEngine engine; ThreadPool(Queue queue, int n, Map<String, URL[]> cache) { this.queue = queue; this.cache = cache; nbThread = n; pool = Executors.newFixedThreadPool(nbThread); } void run(com.enioka.jqm.jpamodel.JobInstance ji, Polling p, boolean stop) { jqmlogger.info("Job instance will be inserted inside a thread pool: " + ji.getId()); jqmlogger.debug("ThreadPool ActualNbThread: " + p.getActualNbThread()); if (stop) { System.exit(0); } else { pool.submit(new Loader(ji, cache, p)); } } Queue getQueue() { return queue; } int getNbThread() { return nbThread; } }
Removed unused field. Signed-off-by: marcanpilami <[email protected]>
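For clarity, the only difference between the two ThreadPool.java versions above is the deletion of this field; the constructor and methods are untouched:

    // Present in the old version, removed in the new one; it was never assigned or read.
    private JqmEngine engine;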
serious/JobBaseAPI/src/main/java/com/enioka/jqm/tools/ThreadPool.java
Removed unused field
Java
apache-2.0
12451f1804e12fbec0c0ed4f29adfe5c5465b8d2
0
rtkasodariya/interview,UseOnly/interview,kumargauravtiwary/interview,pankajbhanu/interview,mission-peace/interview,prabasn/interview,allwinsr/interview,mission-peace/interview,cvasani/interview,harshul1610/interview,priyatransbit/interview,saviaga/interview,Chrisgcy/interview,onlymilap/interview,chinmayakelkar/interview,rtkasodariya/interview,sazibislam/codeAlgorithom,saibimajdi/interview,manish211/interview-1,welcomenilesh/interview,mdtareque/interview,aman-iitj/interview,nadeembhati/interview,hackerFarmaan/interview,chiranjeevjain/interview,Shekharrajak/interview,TheKingSlayer/interview,rootmonty/interview,sonamsh/interview,mission-peace/interview,Shekharrajak/interview,lsingal/interview
package com.interview.string; /** .Given an input string S write a function which returns true if it satisfies S = nT. Basically you have to find if a given string can be represented from a substring by iterating it 'n' times. n >= 2 An example would suffice Function should return true if 1) S = abab 2) S = abcdabcd 3) S = abcabcabc 4) S = zzxzzxzzx Function should return false if 1) S = abac 2) S = abcdabbd 3) S = abcabcefg 4) S = zzxzzyzzx */ public class NTMatch { public boolean match(char str[]){ int kmp[] = buildKMP(str); int index = kmp[str.length-1]; //reason end is this rather than index+1 because //if our string was ababab for KMP we would have index as 4 at str.length-1 and we //want end to be 1 rather than 5 int end = str.length - index-1; if(end >= str.length/2){ return false; } int j = end+1; int i = 0; while(j < str.length){ if(str[i] != str[j]){ return false; } i = (i+1)%(end+1); j++; } if(i == 0){ return true; } return false; } private int[] buildKMP(char str[]){ int result[] = new int[str.length]; int i =1; result[0] = 0; int len =0; while(i < str.length){ if(str[i] == str[len]){ len++; result[i] = len; i++; }else{ if(len != 0){ len = result[len-1]; }else{ len =0; result[i] = 0; i++; } } } return result; } public static void main(String args[]){ NTMatch ntMatch = new NTMatch(); System.out.println(ntMatch.match("bababababa".toCharArray())); } }
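The KMP failure-function test above can be cross-checked with the classic doubling trick: a string S is n >= 2 copies of some substring T exactly when S occurs inside S + S with the first and last characters stripped. The class below is an editor's illustration of that equivalent check, not part of the original repository; its name and method are made up.

    public class RepeatedSubstringCheck {

        // True when s consists of two or more copies of a shorter substring.
        static boolean isRepeated(String s) {
            if (s == null || s.length() < 2) {
                return false; // a repetition needs at least two copies
            }
            String doubled = s + s;
            // Searching strictly inside the doubled string excludes the trivial
            // occurrences at offset 0 and offset s.length().
            return doubled.substring(1, doubled.length() - 1).contains(s);
        }

        public static void main(String[] args) {
            System.out.println(isRepeated("ababab"));    // true, T = "ab"
            System.out.println(isRepeated("zzxzzyzzx")); // false, as in the comment above
        }
    }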
src/com/interview/string/NTMatch.java
package com.interview.string; /** .Given an input string S write a function which returns true if it satisfies S = nT. Basically you have to find if a given string can be represented from a substring by iterating it 'n' times. n >= 2 An example would suffice - Function should return true if 1) S = "abab" 2) S = "abcdabcd" 3) S = "abcabcabc" 4) S = "zzxzzxzzx" Function should return false if 1) S = "abac" 2) S = "abcdabbd" 3) S = "abcabcefg" 4) S = "zzxzzyzzx" */ public class NTMatch { public boolean match(char str[]){ int kmp[] = buildKMP(str); int index = kmp[str.length-1]; int end = str.length - index-1; if(end >= str.length/2){ return false; } int j = end+1; int i = 0; while(j < str.length){ if(str[i] != str[j]){ return false; } i = (i+1)%(end+1); j++; } if(i == 0){ return true; } return false; } private int[] buildKMP(char str[]){ int result[] = new int[str.length]; int i =1; result[0] = 0; int len =0; while(i < str.length){ if(str[i] == str[len]){ len++; result[i] = len; i++; }else{ if(len != 0){ len = result[len-1]; }else{ len =0; result[i] = 0; i++; } } } return result; } public static void main(String args[]){ NTMatch ntMatch = new NTMatch(); System.out.println(ntMatch.match("bababababa".toCharArray())); } }
Removed special characters
src/com/interview/string/NTMatch.java
Removed special characters
Java
apache-2.0
0021c192b4bed584dc092821a0274c61e0597948
0
neo4j/neo4j-ogm,neo4j/neo4j-ogm,neo4j/neo4j-ogm,neo4j/neo4j-ogm
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This product is licensed to you under the Apache License, Version 2.0 (the "License"). * You may not use this product except in compliance with the License. * * This product may include a number of subcomponents with * separate copyright notices and license terms. Your use of the source * code for these subcomponents is subject to the terms and * conditions of the subcomponent's license, as noted in the LICENSE file. */ package org.neo4j.ogm.autoindex; import static java.util.stream.Collectors.*; import static org.assertj.core.api.Assertions.*; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.Consumer; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.assertj.core.api.AbstractThrowableAssert; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Result; import org.neo4j.graphdb.Transaction; import org.neo4j.graphdb.schema.ConstraintDefinition; import org.neo4j.graphdb.schema.IndexDefinition; import org.neo4j.ogm.config.Configuration; import org.neo4j.ogm.session.SessionFactory; import org.neo4j.ogm.testutil.MultiDriverTestClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Must not end with "Test" so it does not run on TC. * * @author Frantisek Hartman * @author Michael J. Simons */ public abstract class BaseAutoIndexManagerTestClass extends MultiDriverTestClass { private static final Logger logger = LoggerFactory.getLogger(BaseAutoIndexManagerTestClass.class); private static final String[] COMMUNITY_INDEXES = { "INDEX ON :User(email)", }; private static final String[] COMMUNITY_CONSTRAINTS = { "CONSTRAINT ON (user:User) ASSERT user.login IS UNIQUE", }; private static final String[] ENTERPRISE_INDEXES = { "INDEX ON :User(email)", "INDEX ON :User(lat, lon)" }; private static final String[] ENTERPRISE_CONSTRAINTS = { "CONSTRAINT ON (user:User) ASSERT user.login IS UNIQUE", "CONSTRAINT ON (user:User) ASSERT (user.key, user.key2) IS NODE KEY", "CONSTRAINT ON (user:User) ASSERT exists(user.address)", "CONSTRAINT ON ()-[rating:RATING]-() ASSERT exists(rating.stars)", }; private String[] indexes; private String[] constraints; private String[] statements; private String[] expectedIndexDefinitions; private GraphDatabaseService service; protected SessionFactory sessionFactory; public BaseAutoIndexManagerTestClass(String[] expectedIndexDefinitions, Class<?>... 
packages) { sessionFactory = new SessionFactory(driver, Arrays.stream(packages).map(Class::getName).toArray(String[]::new)); this.expectedIndexDefinitions = expectedIndexDefinitions; } @Before public void setUp() { service = getGraphDatabaseService(); service.execute("MATCH (n) DETACH DELETE n"); String[] existingConstraints = service.execute("CALL db.constraints()").stream().map(r -> r.get("description")).toArray(String[]::new); executeDrop(existingConstraints); if (isEnterpriseEdition() && isVersionOrGreater("3.2.0")) { indexes = ENTERPRISE_INDEXES; constraints = ENTERPRISE_CONSTRAINTS; statements = Stream.of(ENTERPRISE_INDEXES, ENTERPRISE_CONSTRAINTS).flatMap(Stream::of) .toArray(String[]::new); } else { indexes = COMMUNITY_INDEXES; constraints = COMMUNITY_CONSTRAINTS; statements = Stream.of(COMMUNITY_INDEXES, COMMUNITY_CONSTRAINTS).flatMap(Stream::of).toArray(String[]::new); } } @After public void tearDown() throws Exception { executeDrop(expectedIndexDefinitions); executeDrop(statements); } @Test public void testAutoIndexNoneNoIndexIsCreated() { runAutoIndex("none"); executeForIndexes(indexes -> { assertThat(indexes).isEmpty(); }); executeForConstraints(constraints -> { assertThat(constraints).isEmpty(); }); } @Test public void testAutoIndexNoneNoIndexIsDropped() { executeCreate(statements); runAutoIndex("none"); executeForIndexes(indexes -> { assertThat(indexes).hasSize(this.indexes.length); }); executeForConstraints(constraints -> { assertThat(constraints).hasSize(this.constraints.length); }); } @Test public void testIndexesAreSuccessfullyValidated() { executeCreate(expectedIndexDefinitions); runAutoIndex("validate"); } @Test public void testIndexValidationFailsOnMissingIndex() { final AbstractThrowableAssert<?, ? extends Throwable> assertThatException = assertThatThrownBy( () -> runAutoIndex("validate")) .isInstanceOf(MissingIndexException.class) .hasMessageContaining("Validation of Constraints and Indexes failed. 
Could not find the following :"); for (String definition : this.expectedIndexDefinitions) { assertThatException.hasMessageContaining(definition); } } @Test public void testAutoIndexAssertDropsAllIndexesAndCreatesExisting() { executeCreate(statements); runAutoIndex("assert"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); assertThat(all).hasSize(this.expectedIndexDefinitions.length); } @Test public void testAutoIndexUpdateKeepIndexesAndCreateNew() { executeCreate(statements); runAutoIndex("update"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); int expectedNumberOfIndexes = this.indexes.length + this.constraints.length + this.expectedIndexDefinitions.length; assertThat(all).hasSize(expectedNumberOfIndexes); } @Test public void testAutoIndexUpdateIndexExistsDoNothing() { executeCreate(expectedIndexDefinitions); runAutoIndex("update"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); assertThat(all).hasSize(this.expectedIndexDefinitions.length); } @Test public void testAutoIndexDumpCreatesIndex() throws IOException { File file = File.createTempFile("test", ".cql"); file.deleteOnExit(); Configuration configuration = getBaseConfiguration() .autoIndex("dump") .generatedIndexesOutputDir(file.getParent()) .generatedIndexesOutputFilename(file.getName()) .build(); sessionFactory.runAutoIndexManager(configuration); assertThat(file).exists().canRead(); assertThat(contentOf(file)) .contains(Arrays.stream(expectedIndexDefinitions).map(d -> "CREATE " + d).toArray(String[]::new)); } void runAutoIndex(String mode) { Configuration configuration = getBaseConfiguration().autoIndex(mode).build(); sessionFactory.runAutoIndexManager(configuration); } void executeForIndexes(Consumer<List<IndexDefinition>> consumer) { try (Transaction tx = service.beginTx()) { Iterable<IndexDefinition> indexes = service.schema().getIndexes(); List<IndexDefinition> pureIndexes = StreamSupport.stream(indexes.spliterator(), false) .filter(indexDefinition -> !indexDefinition.isConstraintIndex()) .collect(toList()); consumer.accept(pureIndexes); tx.success(); } } void executeForConstraints(Consumer<List<ConstraintDefinition>> consumer) { try (Transaction tx = service.beginTx()) { List<ConstraintDefinition> constraints = StreamSupport .stream(service.schema().getConstraints().spliterator(), false) .collect(toList()); consumer.accept(constraints); tx.success(); } } void executeCreate(String... statements) { for (String statement : statements) { logger.info("Execute CREATE " + statement); Result execute = service.execute("CREATE " + statement); execute.close(); } } void executeDrop(String... statements) { for (String statement : statements) { // need to handle transaction manually because when the service.execute fails with exception // it does not clean up the tx resources, leading to deadlock later Transaction tx = service.beginTx(); try { service.execute("DROP " + statement); tx.success(); } catch (Exception e) { logger.trace("Could not execute drop for statement (this is likely expected) {}", statement, e); tx.failure(); } tx.close(); } } }
test/src/test/java/org/neo4j/ogm/autoindex/BaseAutoIndexManagerTestClass.java
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This product is licensed to you under the Apache License, Version 2.0 (the "License"). * You may not use this product except in compliance with the License. * * This product may include a number of subcomponents with * separate copyright notices and license terms. Your use of the source * code for these subcomponents is subject to the terms and * conditions of the subcomponent's license, as noted in the LICENSE file. */ package org.neo4j.ogm.autoindex; import static java.util.stream.Collectors.*; import static org.assertj.core.api.Assertions.*; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.function.Consumer; import java.util.stream.Stream; import java.util.stream.StreamSupport; import org.assertj.core.api.AbstractThrowableAssert; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.graphdb.Result; import org.neo4j.graphdb.Transaction; import org.neo4j.graphdb.schema.ConstraintDefinition; import org.neo4j.graphdb.schema.IndexDefinition; import org.neo4j.ogm.config.Configuration; import org.neo4j.ogm.session.SessionFactory; import org.neo4j.ogm.testutil.MultiDriverTestClass; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Must not end with "Test" so it does not run on TC. * * @author Frantisek Hartman * @author Michael J. Simons */ public abstract class BaseAutoIndexManagerTestClass extends MultiDriverTestClass { private static final Logger logger = LoggerFactory.getLogger(BaseAutoIndexManagerTestClass.class); private static final String[] COMMUNITY_INDEXES = { "INDEX ON :User(email)", }; private static final String[] COMMUNITY_CONSTRAINTS = { "CONSTRAINT ON (user:User) ASSERT user.login IS UNIQUE", }; private static final String[] ENTERPRISE_INDEXES = { "INDEX ON :User(email)", "INDEX ON :User(lat, lon)" }; private static final String[] ENTERPRISE_CONSTRAINTS = { "CONSTRAINT ON (user:User) ASSERT user.login IS UNIQUE", "CONSTRAINT ON (user:User) ASSERT (user.key, user.key2) IS NODE KEY", "CONSTRAINT ON (user:User) ASSERT exists(user.address)", "CONSTRAINT ON ()-[rating:RATING]-() ASSERT exists(rating.stars)", }; private String[] indexes; private String[] constraints; private String[] statements; private String[] expectedIndexDefinitions; private GraphDatabaseService service; protected SessionFactory sessionFactory; public BaseAutoIndexManagerTestClass(String[] expectedIndexDefinitions, Class<?>... 
packages) { sessionFactory = new SessionFactory(driver, Arrays.stream(packages).map(Class::getName).toArray(String[]::new)); this.expectedIndexDefinitions = expectedIndexDefinitions; } @Before public void setUp() { service = getGraphDatabaseService(); String[] existingConstraints = service.execute("CALL db.constraints()").stream().map(r -> r.get("description")).toArray(String[]::new); executeDrop(existingConstraints); if (isEnterpriseEdition() && isVersionOrGreater("3.2.0")) { indexes = ENTERPRISE_INDEXES; constraints = ENTERPRISE_CONSTRAINTS; statements = Stream.of(ENTERPRISE_INDEXES, ENTERPRISE_CONSTRAINTS).flatMap(Stream::of) .toArray(String[]::new); } else { indexes = COMMUNITY_INDEXES; constraints = COMMUNITY_CONSTRAINTS; statements = Stream.of(COMMUNITY_INDEXES, COMMUNITY_CONSTRAINTS).flatMap(Stream::of).toArray(String[]::new); } } @After public void tearDown() throws Exception { executeDrop(expectedIndexDefinitions); executeDrop(statements); } @Test public void testAutoIndexNoneNoIndexIsCreated() { runAutoIndex("none"); executeForIndexes(indexes -> { assertThat(indexes).isEmpty(); }); executeForConstraints(constraints -> { assertThat(constraints).isEmpty(); }); } @Test public void testAutoIndexNoneNoIndexIsDropped() { executeCreate(statements); runAutoIndex("none"); executeForIndexes(indexes -> { assertThat(indexes).hasSize(this.indexes.length); }); executeForConstraints(constraints -> { assertThat(constraints).hasSize(this.constraints.length); }); } @Test public void testIndexesAreSuccessfullyValidated() { executeCreate(expectedIndexDefinitions); runAutoIndex("validate"); } @Test public void testIndexValidationFailsOnMissingIndex() { final AbstractThrowableAssert<?, ? extends Throwable> assertThatException = assertThatThrownBy( () -> runAutoIndex("validate")) .isInstanceOf(MissingIndexException.class) .hasMessageContaining("Validation of Constraints and Indexes failed. 
Could not find the following :"); for (String definition : this.expectedIndexDefinitions) { assertThatException.hasMessageContaining(definition); } } @Test public void testAutoIndexAssertDropsAllIndexesAndCreatesExisting() { executeCreate(statements); runAutoIndex("assert"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); assertThat(all).hasSize(this.expectedIndexDefinitions.length); } @Test public void testAutoIndexUpdateKeepIndexesAndCreateNew() { executeCreate(statements); runAutoIndex("update"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); int expectedNumberOfIndexes = this.indexes.length + this.constraints.length + this.expectedIndexDefinitions.length; assertThat(all).hasSize(expectedNumberOfIndexes); } @Test public void testAutoIndexUpdateIndexExistsDoNothing() { executeCreate(expectedIndexDefinitions); runAutoIndex("update"); List<Object> all = new ArrayList<>(); executeForIndexes(all::addAll); executeForConstraints(all::addAll); assertThat(all).hasSize(this.expectedIndexDefinitions.length); } @Test public void testAutoIndexDumpCreatesIndex() throws IOException { File file = File.createTempFile("test", ".cql"); file.deleteOnExit(); Configuration configuration = getBaseConfiguration() .autoIndex("dump") .generatedIndexesOutputDir(file.getParent()) .generatedIndexesOutputFilename(file.getName()) .build(); sessionFactory.runAutoIndexManager(configuration); assertThat(file).exists().canRead(); assertThat(contentOf(file)) .contains(Arrays.stream(expectedIndexDefinitions).map(d -> "CREATE " + d).toArray(String[]::new)); } void runAutoIndex(String mode) { Configuration configuration = getBaseConfiguration().autoIndex(mode).build(); sessionFactory.runAutoIndexManager(configuration); } void executeForIndexes(Consumer<List<IndexDefinition>> consumer) { try (Transaction tx = service.beginTx()) { Iterable<IndexDefinition> indexes = service.schema().getIndexes(); List<IndexDefinition> pureIndexes = StreamSupport.stream(indexes.spliterator(), false) .filter(indexDefinition -> !indexDefinition.isConstraintIndex()) .collect(toList()); consumer.accept(pureIndexes); tx.success(); } } void executeForConstraints(Consumer<List<ConstraintDefinition>> consumer) { try (Transaction tx = service.beginTx()) { List<ConstraintDefinition> constraints = StreamSupport .stream(service.schema().getConstraints().spliterator(), false) .collect(toList()); consumer.accept(constraints); tx.success(); } } void executeCreate(String... statements) { for (String statement : statements) { logger.info("Execute CREATE " + statement); Result execute = service.execute("CREATE " + statement); execute.close(); } } void executeDrop(String... statements) { for (String statement : statements) { // need to handle transaction manually because when the service.execute fails with exception // it does not clean up the tx resources, leading to deadlock later Transaction tx = service.beginTx(); try { service.execute("DROP " + statement); tx.success(); } catch (Exception e) { logger.trace("Could not execute drop for statement (this is likely expected) {}", statement, e); tx.failure(); } tx.close(); } } }
Fix broken fix from 6742d48d.
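For readability, the behavioural difference between the two versions of BaseAutoIndexManagerTestClass above is this added statement at the start of setUp(), which clears all nodes and relationships before the existing constraints are dropped:

    service = getGraphDatabaseService();
    // New in this commit: wipe the graph so data left over from an earlier test
    // cannot interfere with dropping and recreating constraints and indexes.
    service.execute("MATCH (n) DETACH DELETE n");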
test/src/test/java/org/neo4j/ogm/autoindex/BaseAutoIndexManagerTestClass.java
Fix broken fix from 6742d48d.
Java
apache-2.0
4b97db862dddd37b19fbaad1ed097a64102f75b9
0
blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,gradle/gradle,gstevey/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.tasks.testing.testng; import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.InvalidUserDataException; import org.gradle.api.internal.initialization.loadercache.ClassLoaderCache; import org.gradle.api.internal.plugins.DslObject; import org.gradle.api.internal.tasks.testing.TestClassLoaderFactory; import org.gradle.api.internal.tasks.testing.TestClassProcessor; import org.gradle.api.internal.tasks.testing.TestFramework; import org.gradle.api.internal.tasks.testing.WorkerTestClassProcessorFactory; import org.gradle.api.internal.tasks.testing.detection.ClassFileExtractionManager; import org.gradle.api.internal.tasks.testing.filter.DefaultTestFilter; import org.gradle.api.reporting.DirectoryReport; import org.gradle.api.tasks.testing.Test; import org.gradle.api.tasks.testing.testng.TestNGOptions; import org.gradle.internal.TimeProvider; import org.gradle.internal.actor.ActorFactory; import org.gradle.internal.id.IdGenerator; import org.gradle.internal.reflect.Instantiator; import org.gradle.internal.service.ServiceRegistry; import org.gradle.process.internal.worker.WorkerProcessBuilder; import java.io.File; import java.io.Serializable; import java.util.List; import java.util.concurrent.Callable; public class TestNGTestFramework implements TestFramework { private TestNGOptions options; private final TestNGDetector detector; private final Test testTask; private final DefaultTestFilter filter; private final TestClassLoaderFactory classLoaderFactory; public TestNGTestFramework(final Test testTask, DefaultTestFilter filter, Instantiator instantiator, ClassLoaderCache classLoaderCache) { this.testTask = testTask; this.filter = filter; options = instantiator.newInstance(TestNGOptions.class, testTask.getProject().getProjectDir()); conventionMapOutputDirectory(options, testTask.getReports().getHtml()); detector = new TestNGDetector(new ClassFileExtractionManager(testTask.getTemporaryDirFactory())); classLoaderFactory = new TestClassLoaderFactory(classLoaderCache, testTask); } private static void conventionMapOutputDirectory(TestNGOptions options, final DirectoryReport html) { new DslObject(options).getConventionMapping().map("outputDirectory", new Callable<File>() { public File call() { return html.getDestination(); } }); } @Override public WorkerTestClassProcessorFactory getProcessorFactory() { verifyConfigFailurePolicy(); verifyPreserveOrder(); verifyGroupByInstances(); List<File> suiteFiles = options.getSuites(testTask.getTemporaryDir()); TestNGSpec spec = new TestNGSpec(options, filter); return new TestClassProcessorFactoryImpl(this.options.getOutputDirectory(), spec, suiteFiles); } private void verifyConfigFailurePolicy() { if (!options.getConfigFailurePolicy().equals(TestNGOptions.DEFAULT_CONFIG_FAILURE_POLICY)) { verifyMethodExists("setConfigFailurePolicy", String.class, String.format("The version of TestNG used does not support setting config 
failure policy to '%s'.", options.getConfigFailurePolicy())); } } private void verifyPreserveOrder() { if (options.getPreserveOrder()) { verifyMethodExists("setPreserveOrder", boolean.class, "Preserving the order of tests is not supported by this version of TestNG."); } } private void verifyGroupByInstances() { if (options.getGroupByInstances()) { verifyMethodExists("setGroupByInstances", boolean.class, "Grouping tests by instances is not supported by this version of TestNG."); } } private void verifyMethodExists(String methodName, Class<?> parameterType, String failureMessage) { try { createTestNg().getMethod(methodName, parameterType); } catch (NoSuchMethodException e) { throw new InvalidUserDataException(failureMessage, e); } } private Class<?> createTestNg() { try { return classLoaderFactory.create().loadClass("org.testng.TestNG"); } catch (ClassNotFoundException e) { throw new GradleException("Could not load TestNG.", e); } } @Override public Action<WorkerProcessBuilder> getWorkerConfigurationAction() { return new Action<WorkerProcessBuilder>() { public void execute(WorkerProcessBuilder workerProcessBuilder) { workerProcessBuilder.sharedPackages("org.testng"); } }; } @Override public TestNGOptions getOptions() { return options; } void setOptions(TestNGOptions options) { this.options = options; } @Override public TestNGDetector getDetector() { return detector; } private static class TestClassProcessorFactoryImpl implements WorkerTestClassProcessorFactory, Serializable { private final File testReportDir; private final TestNGSpec options; private final List<File> suiteFiles; public TestClassProcessorFactoryImpl(File testReportDir, TestNGSpec options, List<File> suiteFiles) { this.testReportDir = testReportDir; this.options = options; this.suiteFiles = suiteFiles; } @Override public TestClassProcessor create(ServiceRegistry serviceRegistry) { return new TestNGTestClassProcessor(testReportDir, options, suiteFiles, serviceRegistry.get(IdGenerator.class), serviceRegistry.get(TimeProvider.class), serviceRegistry.get(ActorFactory.class)); } } }
subprojects/testing-jvm/src/main/java/org/gradle/api/internal/tasks/testing/testng/TestNGTestFramework.java
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.tasks.testing.testng; import org.gradle.api.Action; import org.gradle.api.GradleException; import org.gradle.api.InvalidUserDataException; import org.gradle.api.internal.initialization.loadercache.ClassLoaderCache; import org.gradle.api.internal.plugins.DslObject; import org.gradle.api.internal.tasks.testing.TestClassLoaderFactory; import org.gradle.api.internal.tasks.testing.TestClassProcessor; import org.gradle.api.internal.tasks.testing.TestFramework; import org.gradle.api.internal.tasks.testing.WorkerTestClassProcessorFactory; import org.gradle.api.internal.tasks.testing.detection.ClassFileExtractionManager; import org.gradle.api.internal.tasks.testing.filter.DefaultTestFilter; import org.gradle.api.reporting.DirectoryReport; import org.gradle.api.tasks.testing.Test; import org.gradle.api.tasks.testing.testng.TestNGOptions; import org.gradle.internal.Factory; import org.gradle.internal.TimeProvider; import org.gradle.internal.actor.ActorFactory; import org.gradle.internal.id.IdGenerator; import org.gradle.internal.reflect.Instantiator; import org.gradle.internal.service.ServiceRegistry; import org.gradle.process.internal.worker.WorkerProcessBuilder; import org.gradle.util.DeprecationLogger; import java.io.File; import java.io.Serializable; import java.util.List; import java.util.concurrent.Callable; public class TestNGTestFramework implements TestFramework { private TestNGOptions options; private final TestNGDetector detector; private final Test testTask; private final DefaultTestFilter filter; private final TestClassLoaderFactory classLoaderFactory; public TestNGTestFramework(final Test testTask, DefaultTestFilter filter, Instantiator instantiator, ClassLoaderCache classLoaderCache) { this.testTask = testTask; this.filter = filter; options = instantiator.newInstance(TestNGOptions.class, testTask.getProject().getProjectDir()); conventionMapOutputDirectory(options, testTask.getReports().getHtml()); detector = new TestNGDetector(new ClassFileExtractionManager(testTask.getTemporaryDirFactory())); classLoaderFactory = new TestClassLoaderFactory(classLoaderCache, testTask); } private static void conventionMapOutputDirectory(TestNGOptions options, final DirectoryReport html) { new DslObject(options).getConventionMapping().map("outputDirectory", new Callable<File>() { public File call() { return html.getDestination(); } }); } @Override public WorkerTestClassProcessorFactory getProcessorFactory() { verifyConfigFailurePolicy(); verifyPreserveOrder(); verifyGroupByInstances(); List<File> suiteFiles = options.getSuites(testTask.getTemporaryDir()); TestNGSpec spec = DeprecationLogger.whileDisabled(new Factory<TestNGSpec>() { @Override public TestNGSpec create() { return new TestNGSpec(options, filter); } }); return new TestClassProcessorFactoryImpl(this.options.getOutputDirectory(), spec, suiteFiles); } private void verifyConfigFailurePolicy() { if 
(!options.getConfigFailurePolicy().equals(TestNGOptions.DEFAULT_CONFIG_FAILURE_POLICY)) { verifyMethodExists("setConfigFailurePolicy", String.class, String.format("The version of TestNG used does not support setting config failure policy to '%s'.", options.getConfigFailurePolicy())); } } private void verifyPreserveOrder() { if (options.getPreserveOrder()) { verifyMethodExists("setPreserveOrder", boolean.class, "Preserving the order of tests is not supported by this version of TestNG."); } } private void verifyGroupByInstances() { if (options.getGroupByInstances()) { verifyMethodExists("setGroupByInstances", boolean.class, "Grouping tests by instances is not supported by this version of TestNG."); } } private void verifyMethodExists(String methodName, Class<?> parameterType, String failureMessage) { try { createTestNg().getMethod(methodName, parameterType); } catch (NoSuchMethodException e) { throw new InvalidUserDataException(failureMessage, e); } } private Class<?> createTestNg() { try { return classLoaderFactory.create().loadClass("org.testng.TestNG"); } catch (ClassNotFoundException e) { throw new GradleException("Could not load TestNG.", e); } } @Override public Action<WorkerProcessBuilder> getWorkerConfigurationAction() { return new Action<WorkerProcessBuilder>() { public void execute(WorkerProcessBuilder workerProcessBuilder) { workerProcessBuilder.sharedPackages("org.testng"); } }; } @Override public TestNGOptions getOptions() { return options; } void setOptions(TestNGOptions options) { this.options = options; } @Override public TestNGDetector getDetector() { return detector; } private static class TestClassProcessorFactoryImpl implements WorkerTestClassProcessorFactory, Serializable { private final File testReportDir; private final TestNGSpec options; private final List<File> suiteFiles; public TestClassProcessorFactoryImpl(File testReportDir, TestNGSpec options, List<File> suiteFiles) { this.testReportDir = testReportDir; this.options = options; this.suiteFiles = suiteFiles; } @Override public TestClassProcessor create(ServiceRegistry serviceRegistry) { return new TestNGTestClassProcessor(testReportDir, options, suiteFiles, serviceRegistry.get(IdGenerator.class), serviceRegistry.get(TimeProvider.class), serviceRegistry.get(ActorFactory.class)); } } }
Remove unused DeprecationLogger.whileDisabled. Missed when removing TestNG support for source annotations. +review REVIEW-6002
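Reconstructed from the two file versions above, the change in getProcessorFactory() boils down to dropping the deprecation-suppression wrapper; the "why" in the old-version comment below is inferred from the commit message, not stated in the code.

    // Old: construction wrapped in whileDisabled, presumably because building TestNGSpec
    // used to read the now-removed source-annotation option and would log a deprecation warning.
    TestNGSpec spec = DeprecationLogger.whileDisabled(new Factory<TestNGSpec>() {
        @Override
        public TestNGSpec create() {
            return new TestNGSpec(options, filter);
        }
    });

    // New: that option is gone, so the spec is built directly.
    TestNGSpec spec = new TestNGSpec(options, filter);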
subprojects/testing-jvm/src/main/java/org/gradle/api/internal/tasks/testing/testng/TestNGTestFramework.java
Remove unused DeprecationLogger.whileDisabled
Java
apache-2.0
8ef83d191a191b1dfd419b48496548ff44cb6c47
0
ricepanda/rice-git3,ricepanda/rice-git2,kuali/rice-playground,ricepanda/rice-git3,ricepanda/rice-git2,kuali/rice-playground,kuali/rice-playground,kuali/rice-playground,ricepanda/rice-git2,ricepanda/rice-git3,ricepanda/rice-git2,ricepanda/rice-git3
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.demo.uif.library.fields; import org.junit.Test; import org.kuali.rice.krad.demo.uif.library.DemoLibraryBase; import org.kuali.rice.krad.uif.UifConstants; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; /** * @author Kuali Rice Team ([email protected]) */ public class DemoFieldsInputAft extends DemoLibraryBase { /** * /kr-krad/kradsampleapp?viewId=Demo-InputFieldView&methodToCall=start */ public static final String BOOKMARK_URL = "/kr-krad/kradsampleapp?viewId=Demo-InputFieldView&methodToCall=start"; @Override public String getBookmarkUrl() { return BOOKMARK_URL; } @Override protected void navigate() throws Exception { navigateToLibraryDemo("Fields", "Input Field"); } protected void testInputFieldDefault() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example1"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 1']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("InputField 1:")) { fail("Label text does not match"); } assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + input[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldAltControl() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example2"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 2']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("InputField 2:")) { fail("Label text does not match"); } assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + textarea[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldInstructionalText() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example3"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 3']"), exampleDiv); String fieldId = field.getAttribute("id"); String instructionalTextId = fieldId + UifConstants.IdSuffixes.INSTRUCTIONAL; String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + instructionalTextId); assertTextPresent("Instructions for this field", 
"#" + instructionalTextId, "InputField value not correct"); // validate that the instructional text comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + p[id='" + instructionalTextId + "']"), exampleDiv); // validate that the value comes after the instructional text findElement(By.cssSelector("p[id='" + instructionalTextId + "'] + input[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldConstraintText() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example4"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 4']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String constraintTextId = fieldId + UifConstants.IdSuffixes.CONSTRAINT; assertIsVisible("#" + constraintTextId); assertTextPresent("Text to tell users about constraints this field may have", "#" + constraintTextId, "InputField value not correct"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + input[id='" + controlId + "']"), exampleDiv); // validate that the constraint text comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] ~ p[id='" + constraintTextId + "']"), exampleDiv); } protected void testInputFieldLabelTop() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example5"); WebElement field = findElement(By.cssSelector("div[data-label='Label Top Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("Label Top Field")) { fail("Label text does not match"); } WebElement labelSpan = findElement(By.cssSelector("label[data-label_for='" + fieldId + "']"), field); // top and bottom add the uif-labelBlock class if (!labelSpan.getAttribute("class").contains("uif-labelBlock")) { fail("Label span does not contain the appropriate class expected"); } } protected void testInputFieldLabelRight() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example6"); WebElement field = findElement(By.cssSelector("div[data-label='Label Right Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + controlId); assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("Label Right Field")) { fail("Label text does not match"); } // validate that the label comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] + label[data-label_for='" + fieldId + "']"), exampleDiv); } protected void testInputFieldQuickfinder() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example7"); WebElement field = findElement(By.cssSelector("div[data-label='Quickfinder Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String quickfinderId = findElement(By.cssSelector(".input-group-btn .uif-actionLink"), field).getAttribute("id"); // validate that the quickfinder comes after the value 
findElement(By.cssSelector("input[id='" + controlId + "'] + .input-group-btn a[id='" + quickfinderId + "']"), exampleDiv); assertIsVisible("#" + quickfinderId); waitAndClickById(quickfinderId); Thread.sleep(2000); driver.switchTo().frame(driver.findElement(By.cssSelector(".fancybox-iframe"))); WebElement travelAccountNumberField = driver.findElement(By.cssSelector( "div[data-label='Travel Account Number']")); String travelAccountNumberFieldId = travelAccountNumberField.getAttribute("id"); String travelAccountNumberControlId = travelAccountNumberFieldId + UifConstants.IdSuffixes.CONTROL; findElement(By.cssSelector("#" + travelAccountNumberControlId), travelAccountNumberField).sendKeys("a1"); waitAndClickSearch3(); waitAndClickReturnValue(); waitAndClickByLinkText("Quickfinder"); assertElementPresentByXpath("//input[@name='inputField7' and @value='a1']"); } protected void testInputFieldInquiry() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example9"); WebElement field = findElement(By.cssSelector("div[data-label='Inquiry Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String inquiryId = field.findElement(By.cssSelector(".uif-actionImage")).getAttribute("id"); // validate that the inquiry comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] + input[id='" + inquiryId + "']"), exampleDiv); assertIsVisible("#" + inquiryId); waitAndClickById(inquiryId); Thread.sleep(2000); driver.switchTo().frame(driver.findElement(By.cssSelector(".fancybox-iframe"))); assertTextPresent("Travel Account"); selectTopFrame(); gotoIframeByXpath("//iframe[@class='fancybox-iframe']"); waitAndClickButtonByText("Close"); selectTopFrame(); } protected void testInputFieldRequired() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example11"); WebElement field = findElement(By.cssSelector("div[data-label='Required Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String errorId = fieldId + UifConstants.IdSuffixes.ERRORS; WebElement requiredAsterisk = findElement(By.cssSelector("span.uif-requiredMessage"), field); if (!requiredAsterisk.getText().contains("*")) { fail("Label asterisk for required field does not appear"); } assertIsVisible("#" + controlId); waitAndClick(By.cssSelector("#" + controlId)); Thread.sleep(3000); waitAndClick(By.cssSelector("#" + fieldId)); fireMouseOverEventByName("inputField11"); if (!field.getAttribute("class").contains("uif-hasError")) { fail("Control does not show error class"); } assertElementPresent("#" + errorId + " img[src$='/krad/images/validation/error.png']"); } protected void testInputFieldUppercase() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example12"); WebElement field = findElement(By.cssSelector("div[data-label='Uppercase field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); assertTextNotPresent("TEST INPUTFIELD", "Control text did not appear as uppercase"); } protected void testInputFieldWidgetInputOnlyWithQuickFinder() throws Exception { //There is nothing under this section on which tests can be applied. 
waitAndClickByLinkText("widgetInputOnly with Quickfinder"); } protected void testInputFieldWidgetInputOnlyWithInquiry() throws Exception { waitAndClickByLinkText("widgetInputOnly with Inquiry"); waitAndClickByXpath("//div[@data-parent='Demo-InputField-Example10']/span/input[@type='image' and @title='Direct Inquiry']"); gotoIframeByXpath("//iframe[@class='fancybox-iframe']"); waitForTextPresent("a2"); waitForTextPresent("Travel Account 2"); waitForTextPresent("EAT - Expense"); waitForTextPresent("fran"); waitAndClickButtonByText("Close"); selectTopFrame(); } protected void testInputFieldDisableNativeAutocomplete() throws Exception { waitAndClickByLinkText("Disable Native Autocomplete"); waitForElementPresentByXpath("//input[@name='inputField13' and @autocomplete='off']"); } protected void testInputFieldInputAddons() throws Exception { waitAndClickByLinkText("Input Addons"); waitForTextPresent(".00 "); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-facebook3']"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-twitter3']"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-youtube']"); } protected void testInputFieldExamples() throws Exception { testInputFieldDefault(); testInputFieldAltControl(); testInputFieldInstructionalText(); testInputFieldConstraintText(); // testInputFieldLabelTop(); // removed from example // testInputFieldLabelRight(); // removed from example testInputFieldQuickfinder(); testInputFieldWidgetInputOnlyWithQuickFinder(); testInputFieldUppercase(); testInputFieldDisableNativeAutocomplete(); testInputFieldInputAddons(); testInputFieldRequired(); testInputFieldInquiry(); } @Test public void testInputFieldExamplesBookmark() throws Exception { testInputFieldExamples(); passed(); } @Test public void testInputFieldExamplesNav() throws Exception { testInputFieldExamples(); passed(); } }
rice-framework/krad-sampleapp/web/src/it/java/org/kuali/rice/krad/demo/uif/library/fields/DemoFieldsInputAft.java
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.demo.uif.library.fields; import org.junit.Test; import org.kuali.rice.krad.demo.uif.library.DemoLibraryBase; import org.kuali.rice.krad.uif.UifConstants; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; /** * @author Kuali Rice Team ([email protected]) */ public class DemoFieldsInputAft extends DemoLibraryBase { /** * /kr-krad/kradsampleapp?viewId=Demo-InputFieldView&methodToCall=start */ public static final String BOOKMARK_URL = "/kr-krad/kradsampleapp?viewId=Demo-InputFieldView&methodToCall=start"; @Override public String getBookmarkUrl() { return BOOKMARK_URL; } @Override protected void navigate() throws Exception { navigateToLibraryDemo("Fields", "Input Field"); } protected void testInputFieldDefault() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example1"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 1']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("InputField 1:")) { fail("Label text does not match"); } assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + input[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldAltControl() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example2"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 2']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("InputField 2:")) { fail("Label text does not match"); } assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + textarea[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldInstructionalText() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example3"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 3']"), exampleDiv); String fieldId = field.getAttribute("id"); String instructionalTextId = fieldId + UifConstants.IdSuffixes.INSTRUCTIONAL + UifConstants.IdSuffixes.SPAN; String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + instructionalTextId); 
assertTextPresent("Instructions for this field", "#" + instructionalTextId, "InputField value not correct"); // validate that the instructional text comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + span[id='" + instructionalTextId + "']"), exampleDiv); // validate that the value comes after the instructional text findElement(By.cssSelector("span[id='" + instructionalTextId + "'] + input[id='" + controlId + "']"), exampleDiv); } protected void testInputFieldConstraintText() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example4"); WebElement field = findElement(By.cssSelector("div[data-label='InputField 4']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String constraintTextId = fieldId + UifConstants.IdSuffixes.CONSTRAINT + UifConstants.IdSuffixes.SPAN; assertIsVisible("#" + constraintTextId); assertTextPresent("Text to tell users about constraints this field may have", "#" + constraintTextId, "InputField value not correct"); // validate that the value comes after the label findElement(By.cssSelector("label[data-label_for='" + fieldId + "'] + input[id='" + controlId + "']"), exampleDiv); // validate that the constraint text comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] ~ span[id='" + constraintTextId + "']"), exampleDiv); } protected void testInputFieldLabelTop() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example5"); WebElement field = findElement(By.cssSelector("div[data-label='Label Top Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("Label Top Field")) { fail("Label text does not match"); } WebElement labelSpan = findElement(By.cssSelector("label[data-label_for='" + fieldId + "']"), field); // top and bottom add the uif-labelBlock class if (!labelSpan.getAttribute("class").contains("uif-labelBlock")) { fail("Label span does not contain the appropriate class expected"); } } protected void testInputFieldLabelRight() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example6"); WebElement field = findElement(By.cssSelector("div[data-label='Label Right Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + controlId); assertIsVisible("#" + fieldId); assertIsVisible("label[for='" + controlId + "']"); WebElement label = findElement(By.cssSelector("label[for='" + controlId + "']"), field); if (!label.getText().contains("Label Right Field")) { fail("Label text does not match"); } // validate that the label comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] + label[data-label_for='" + fieldId + "']"), exampleDiv); } protected void testInputFieldQuickfinder() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example7"); WebElement field = findElement(By.cssSelector("div[data-label='Quickfinder Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String quickfinderId = findElement(By.cssSelector(".input-group-btn .uif-actionLink"), 
field).getAttribute("id"); // validate that the quickfinder comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] + .input-group-btn a[id='" + quickfinderId + "']"), exampleDiv); assertIsVisible("#" + quickfinderId); waitAndClickById(quickfinderId); Thread.sleep(2000); driver.switchTo().frame(driver.findElement(By.cssSelector(".fancybox-iframe"))); WebElement travelAccountNumberField = driver.findElement(By.cssSelector( "div[data-label='Travel Account Number']")); String travelAccountNumberFieldId = travelAccountNumberField.getAttribute("id"); String travelAccountNumberControlId = travelAccountNumberFieldId + UifConstants.IdSuffixes.CONTROL; findElement(By.cssSelector("#" + travelAccountNumberControlId), travelAccountNumberField).sendKeys("a1"); waitAndClickSearch3(); waitAndClickReturnValue(); waitAndClickByLinkText("Quickfinder"); assertElementPresentByXpath("//input[@name='inputField7' and @value='a1']"); } protected void testInputFieldInquiry() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example9"); WebElement field = findElement(By.cssSelector("div[data-label='Inquiry Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String inquiryId = field.findElement(By.cssSelector(".uif-actionImage")).getAttribute("id"); // validate that the inquiry comes after the value findElement(By.cssSelector("input[id='" + controlId + "'] + input[id='" + inquiryId + "']"), exampleDiv); assertIsVisible("#" + inquiryId); waitAndClickById(inquiryId); Thread.sleep(2000); driver.switchTo().frame(driver.findElement(By.cssSelector(".fancybox-iframe"))); assertTextPresent("Travel Account"); selectTopFrame(); gotoIframeByXpath("//iframe[@class='fancybox-iframe']"); waitAndClickButtonByText("Close"); selectTopFrame(); } protected void testInputFieldRequired() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example11"); WebElement field = findElement(By.cssSelector("div[data-label='Required Field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; String errorId = fieldId + UifConstants.IdSuffixes.ERRORS; WebElement requiredAsterisk = findElement(By.cssSelector("span.uif-requiredMessage"), field); if (!requiredAsterisk.getText().contains("*")) { fail("Label asterisk for required field does not appear"); } assertIsVisible("#" + controlId); waitAndClick(By.cssSelector("#" + controlId)); Thread.sleep(3000); waitAndClick(By.cssSelector("#" + fieldId)); fireMouseOverEventByName("inputField11"); if (!field.getAttribute("class").contains("uif-hasError")) { fail("Control does not show error class"); } assertElementPresent("#" + errorId + " img[src$='/krad/images/validation/error.png']"); } protected void testInputFieldUppercase() throws Exception { WebElement exampleDiv = navigateToExample("Demo-InputField-Example12"); WebElement field = findElement(By.cssSelector("div[data-label='Uppercase field']"), exampleDiv); String fieldId = field.getAttribute("id"); String controlId = fieldId + UifConstants.IdSuffixes.CONTROL; assertIsVisible("#" + controlId); waitAndType(By.cssSelector("#" + controlId), "Test InputField"); assertTextNotPresent("TEST INPUTFIELD", "Control text did not appear as uppercase"); } protected void testInputFieldWidgetInputOnlyWithQuickFinder() throws Exception { //There is nothing under this section on which tests can be applied. 
waitAndClickByLinkText("widgetInputOnly with Quickfinder"); } protected void testInputFieldWidgetInputOnlyWithInquiry() throws Exception { waitAndClickByLinkText("widgetInputOnly with Inquiry"); waitAndClickByXpath("//div[@data-parent='Demo-InputField-Example10']/span/input[@type='image' and @title='Direct Inquiry']"); gotoIframeByXpath("//iframe[@class='fancybox-iframe']"); waitForTextPresent("a2"); waitForTextPresent("Travel Account 2"); waitForTextPresent("EAT - Expense"); waitForTextPresent("fran"); waitAndClickButtonByText("Close"); selectTopFrame(); } protected void testInputFieldDisableNativeAutocomplete() throws Exception { waitAndClickByLinkText("Disable Native Autocomplete"); waitForElementPresentByXpath("//input[@name='inputField13' and @autocomplete='off']"); } protected void testInputFieldInputAddons() throws Exception { waitAndClickByLinkText("Input Addons"); waitForTextPresent(".00 "); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-facebook3']"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-twitter3']"); waitForElementPresentByXpath("//div[@data-parent='Demo-InputField-Example14']/div/span/a[@class='uif-actionLink icon-youtube']"); } protected void testInputFieldExamples() throws Exception { testInputFieldDefault(); testInputFieldAltControl(); testInputFieldInstructionalText(); testInputFieldConstraintText(); testInputFieldLabelTop(); testInputFieldLabelRight(); testInputFieldQuickfinder(); testInputFieldWidgetInputOnlyWithQuickFinder(); testInputFieldInquiry(); testInputFieldRequired(); testInputFieldUppercase(); testInputFieldDisableNativeAutocomplete(); testInputFieldInputAddons(); } @Test public void testInputFieldExamplesBookmark() throws Exception { testInputFieldExamples(); passed(); } @Test public void testInputFieldExamplesNav() throws Exception { testInputFieldExamples(); passed(); } }
KULRICE-11781 : Analyze CI Failures for 2.4.0-QA QA Sprint 1 - test update
rice-framework/krad-sampleapp/web/src/it/java/org/kuali/rice/krad/demo/uif/library/fields/DemoFieldsInputAft.java
KULRICE-11781 : Analyze CI Failures for 2.4.0-QA QA Sprint 1 - test update
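The DemoFieldsInputAft test above repeatedly verifies rendering order by joining two element ids into a CSS adjacent-sibling selector (label[...] + input[...]) and letting findElement fail when nothing matches. The sketch below isolates that pattern; the helper class, the ids and the WebDriver wiring are hypothetical and are not part of the Rice test support code.

import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;

public final class SiblingOrderCheck {

    private SiblingOrderCheck() {
    }

    /**
     * Matches only when the control is the immediate next sibling of its label,
     * so a NoSuchElementException signals that the rendering order is wrong.
     */
    public static WebElement assertLabelPrecedesControl(WebDriver driver, String fieldId, String controlId) {
        return driver.findElement(By.cssSelector(
                "label[data-label_for='" + fieldId + "'] + input[id='" + controlId + "']"));
    }

    /**
     * Uses the general-sibling combinator ("~") when other markup, such as a
     * constraint span, may sit between the control and the element that follows it.
     */
    public static WebElement assertControlPrecedes(WebDriver driver, String controlId, String followerId) {
        return driver.findElement(By.cssSelector(
                "input[id='" + controlId + "'] ~ span[id='" + followerId + "']"));
    }
}

A NoSuchElementException from either lookup is what indicates that the page rendered the two elements in the wrong order.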
Java
apache-2.0
ce26b2c168198efed6745173f00913d6814674be
0
dimagi/javarosa,dimagi/javarosa,dimagi/javarosa
package org.dimagi.view; import org.dimagi.chatscreen.ChatScreenForm; import javax.microedition.lcdui.Alert; import javax.microedition.lcdui.Canvas; import javax.microedition.lcdui.Command; import javax.microedition.lcdui.Displayable; import javax.microedition.lcdui.Graphics; import javax.microedition.lcdui.List; import org.celllife.clforms.Controller; import org.celllife.clforms.api.Prompt; import org.celllife.clforms.api.ResponseEvent; import org.celllife.clforms.util.J2MEUtil; import org.celllife.clforms.view.IPrompter; import org.celllife.clforms.view.FormView; import org.celllife.clforms.MVCComponent; public class ChatFormView extends MVCComponent implements FormView { private Controller controller; private static Command prevCommand = new Command("Prev", Command.ITEM, 3); private static Command nextCommand = new Command("Next", Command.ITEM, 3); private static Command exitCommand = new Command("Exit", Command.EXIT, 3); private static Displayable screen = null; private ChatScreenForm chatScreenForm = new ChatScreenForm(); public ChatFormView() { screen = chatScreenForm; screen.addCommand(nextCommand); screen.addCommand(prevCommand); screen.addCommand(exitCommand); } public Displayable getScreen() { return screen; } public void commandAction(Command command, Displayable s) { try { if (command == List.SELECT_COMMAND){ System.out.println("ChatterboxPromptScreen2.select()"); controller.processEvent(new ResponseEvent(ResponseEvent.GOTO,((List)screen).getSelectedIndex())); } else if (command == exitCommand){ System.out.println("ChatterboxPromptScreen2.exitCommand()"); controller.processEvent(new ResponseEvent(ResponseEvent.EXIT, -1)); } else if (command == nextCommand) { System.out.println("ChatterboxPromptScreen2.nextCommand()"); chatScreenForm.goToNextQuestion(); } else if (command == prevCommand) { System.out.println("ChatterboxPromptScreen2.prevCommand()"); chatScreenForm.goToPreviousQuestion(); } } catch (Exception e) { Alert a = new Alert("error.screen" + " 2"); //$NON-NLS-1$ a.setString(e.getMessage()); a.setTimeout(Alert.FOREVER); display.setCurrent(a); } } // Initialize. If a data member is not backed by RMS, make sure // it is uninitialized (null) before you put in values. protected void initModel() throws Exception { } protected void createView() {} protected void updateView() throws Exception {} public void registerController(Controller controller) { this.controller = controller; } public void showPrompt(Prompt prompt) { showScreen(); } }
src/org/dimagi/view/ChatFormView.java
package org.dimagi.view; import org.dimagi.chatscreen.ChatScreenForm; import javax.microedition.lcdui.Alert; import javax.microedition.lcdui.Canvas; import javax.microedition.lcdui.Command; import javax.microedition.lcdui.Displayable; import javax.microedition.lcdui.Graphics; import javax.microedition.lcdui.List; import org.celllife.clforms.Controller; import org.celllife.clforms.api.Prompt; import org.celllife.clforms.api.ResponseEvent; import org.celllife.clforms.util.J2MEUtil; import org.celllife.clforms.view.IPrompter; import org.celllife.clforms.view.FormView; import org.celllife.clforms.MVCComponent; public class ChatFormView extends MVCComponent implements FormView { private Controller controller; //private static Command saveAndReloadCommand; private static Command prevCommand = new Command("Prev", Command.ITEM, 3); private static Command nextCommand = new Command("Next", Command.ITEM, 3); private static Command exitCommand = new Command("Exit", Command.EXIT, 3); private static Displayable screen = null; private Prompt p; private ChatScreenForm chatScreenForm = new ChatScreenForm(); /** * Creates a new DForm. */ public ChatFormView() { System.out.println("ChatterboxPromptScreen2()"); screen = chatScreenForm; screen.addCommand(nextCommand); screen.addCommand(prevCommand); // screen.addCommand(saveAndReloadCommand); screen.addCommand(exitCommand); } public Displayable getScreen() { return screen; } public void commandAction(Command command, Displayable s) { System.out.println("ChatterboxPromptScreen2.commandAction()"); try { // if (command == saveAndReloadCommand) { // System.out.println("ChatterboxPromptScreen2.saveAndReload()"); // controller.processEvent(new ResponseEvent(ResponseEvent.SAVE_AND_RELOAD, -1)); // } // else if (command == List.SELECT_COMMAND){ System.out.println("ChatterboxPromptScreen2.select()"); controller.processEvent(new ResponseEvent(ResponseEvent.GOTO,((List)screen).getSelectedIndex())); } else if (command == exitCommand){ System.out.println("ChatterboxPromptScreen2.exitCommand()"); controller.processEvent(new ResponseEvent(ResponseEvent.EXIT, -1)); } else if (command == nextCommand) { System.out.println("ChatterboxPromptScreen2.nextCommand()"); chatScreenForm.goToNextQuestion(); } else if (command == prevCommand) { System.out.println("ChatterboxPromptScreen2.prevCommand()"); chatScreenForm.goToPreviousQuestion(); } } catch (Exception e) { Alert a = new Alert("error.screen" + " 2"); //$NON-NLS-1$ a.setString(e.getMessage()); a.setTimeout(Alert.FOREVER); display.setCurrent(a); } } // Initialize. If a data member is not backed by RMS, make sure // it is uninitilzed (null) before you put in values. 
protected void initModel() throws Exception { } protected void createView() { System.out.println("ChatterboxPromptScreen2.createView()"); // saveAndReloadCommand = new Command("SAVE&Reload", Command.ITEM, 3); // nextCommand = new Command("Next", Command.ITEM, 3); // exitCommand = new Command("Exit", Command.EXIT, 3); // screen = chatScreenForm; // // form = controller.getForm(); // screen = new List("FORM: "+form.getName(),List.IMPLICIT); // form.calculateRelevantAll(); // for(int i = 0; i<form.getPrompts().size(); i++){ // // if(((Prompt)form.getPrompt(i)).isRelevant()){ // int type = ((Prompt)form.getPrompts().elementAt(i)).getReturnType(); // String temp= J2MEUtil.getStringValue(((Prompt)form.getPrompts().elementAt(i)).getValue(),type); // //((List)screen ).append(((Prompt)form.getPrompts().elementAt(i)).getLongText()+temp,null); // // short text + value // ((List)screen ).append(((Prompt)form.getPrompts().elementAt(i)).getShortText()+" => " +temp,null); // // Long text + value on new line indent // //((List)screen ).append(((Prompt)form.getPrompts().elementAt(i)).getLongText()+"\n A:"+temp,null); // } // // } // screen.addCommand(nextCommand); // screen.addCommand(saveAndReloadCommand); // screen.addCommand(exitCommand); } protected void updateView() throws Exception { System.out.println("ChatterboxPromptScreen2.updateView()"); createView(); } public void registerController(Controller controller) { System.out.println("ChatterboxPromptScreen2.registerController()"); this.controller = controller; } public void showPrompt(Prompt prompt) { System.out.println("ChatterboxPromptScreen2.showPrompt(Prompt)"); this.p = prompt; try{ createView(); showScreen(); }catch(Exception e){ System.out.println("something wrong in CREAT VIEW\n "+e.getMessage()); e.printStackTrace(); } //repaint(); } }
[r90] [trunk/MobileMRSNoPolish] Cleaned up
src/org/dimagi/view/ChatFormView.java
[r90] [trunk/MobileMRSNoPolish] Cleaned up
Java
apache-2.0
b5b92f99e2657d37547b2353329d658955385a56
0
apache/felix-dev,apache/felix-dev,apache/felix-dev,apache/felix-dev
dependencymanager/core/src/main/java/org/apache/felix/dependencymanager/dependencies/TemporalServiceDependency.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless * required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations * under the License. */ package org.apache.felix.dependencymanager.dependencies; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import org.apache.felix.dependencymanager.DependencyActivatorBase; import org.apache.felix.dependencymanager.DependencyService; import org.apache.felix.dependencymanager.impl.Logger; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; /** * A Temporal Service dependency that can block the caller thread between service updates. Only * useful for required stateless dependencies that can be replaced transparently. * A Dynamic Proxy is used to wrap the actual service dependency. When the dependency goes * away, an attempt is made to replace it with another one which satisfies the service dependency * criteria. If no service replacement is available, then any method invocation (through the * dynamic proxy) will block during a configurable timeout. On timeout, an unchecked ServiceUnavailable * exception is raised (but the service is not deactivated).<p> * * When an OSGi update takes place, we use the following locking strategy: A Read/Write lock is used * to synchronize the updating thread with respect to the other threads which invoke the backed service. * The Updating thread uses an exclusive Write lock and service invokers uses a Read lock. This model * allows multiple threads to invoke the backed service concurrently, but the updating thread won't * update the dependency if it's currently in use.<p> * * <b>This class only supports required dependencies, and temporal dependencies must be accessed outside * the Activator (OSGi) thread, because method invocations may block the caller thread when dependencies * are not satisfied. * </b> * * <p> Sample Code:<p> * <blockquote> * * <pre> * import org.apache.felix.dependencymanager.*; * * public class Activator extends DependencyActivatorBase { * public void init(BundleContext ctx, DependencyManager dm) throws Exception { * dm.add(createService() * .setImplementation(MyServer.class) * .add(createTemporalServiceDependency() * .setService(MyDependency.class) * .setTimeout(15000))); * } * * public void destroy(BundleContext ctx, DependencyManager dm) throws Exception { * } * } * * class MyServer implements Runnable { * MyDependency _dependency; // Auto-Injected by reflection. * void start() { * (new Thread(this)).start(); * } * * public void run() { * try { * _dependency.doWork(); * } catch (ServiceUnavailableException e) { * t.printStackTrace(); * } * } * </pre> * * </blockquote> */ public class TemporalServiceDependency extends ServiceDependency implements InvocationHandler { // Max millis to wait for service availability. 
private long m_timeout = 30000; /** * Creates a new Temporal Service Dependency. * * @param context The bundle context of the bundle which is instantiating this dependency object * @param logger the logger our Internal logger for logging events. * @see DependencyActivatorBase#createTemporalServiceDependency() */ public TemporalServiceDependency(BundleContext context, Logger logger) { super(context, logger); super.setRequired(true); } /** * Sets the timeout for this temporal dependency. Specifying a timeout value of zero means that there is no timeout period, * and an invocation on a missing service will fail immediately. * * @param timeout the dependency timeout value greater or equals to 0 * @throws IllegalArgumentException if the timeout is negative * @return this temporal dependency */ public TemporalServiceDependency setTimeout(long timeout) { if (timeout < 0) { throw new IllegalArgumentException("Invalid timeout value: " + timeout); } m_timeout = timeout; return this; } /** * The ServiceTracker calls us here in order to inform about a service arrival. */ public synchronized void addedService(ServiceReference ref, Object service) { boolean makeAvailable = makeAvailable(); if (makeAvailable) { m_serviceInstance = Proxy.newProxyInstance(m_trackedServiceName.getClassLoader(), new Class[] { m_trackedServiceName }, this); } Object[] services = m_services.toArray(); for (int i = 0; i < services.length; i++) { DependencyService ds = (DependencyService) services[i]; if (makeAvailable) { ds.dependencyAvailable(this); } } if (!makeAvailable) { notifyAll(); } } /** * The ServiceTracker calls us here when a tracked service properties are modified. */ public void modifiedService(ServiceReference ref, Object service) { // We don't care. } /** * The ServiceTracker calls us here when a tracked service is lost. */ public synchronized void removedService(ServiceReference ref, Object service) { // Unget what we got in addingService (see ServiceTracker 701.4.1) m_context.ungetService(ref); } /** * @returns our dependency instance. Unlike in ServiceDependency, we always returns our proxy. */ public synchronized Object getService() { return m_serviceInstance; } public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { Object service = m_tracker.getService(); if (service == null) { synchronized (this) { long start = System.currentTimeMillis(); long waitTime = m_timeout; while (service == null) { if (waitTime <= 0) { throw new IllegalStateException("Service unavailable: " + m_trackedServiceName.getName()); } try { wait(waitTime); } catch (InterruptedException e) { throw new IllegalStateException("Service unavailable: " + m_trackedServiceName.getName()); } waitTime = m_timeout - (System.currentTimeMillis() - start); service = m_tracker.getService(); } } } try { return method.invoke(service, args); } catch (IllegalAccessException iae) { method.setAccessible(true); return method.invoke(service, args); } } }
moved to org.apache.felix.dm.impl.dependencies package
dependencymanager/core/src/main/java/org/apache/felix/dependencymanager/dependencies/TemporalServiceDependency.java
moved to org.apache.felix.dm.impl.dependencies package
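The class comment in the TemporalServiceDependency record above describes the core idea: hide the real service behind a java.lang.reflect.Proxy, block callers while the dependency is being replaced, and fail after a configurable timeout. The sketch below illustrates that proxy-with-timeout mechanism in isolation; it is not the Felix Dependency Manager implementation, and the class and method names are invented for the example.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

public final class BlockingServiceProxy<T> implements InvocationHandler {

    private final Class<T> type;
    private final long timeoutMillis;
    private volatile T delegate; // currently bound service, null while it is being replaced

    public BlockingServiceProxy(Class<T> type, long timeoutMillis) {
        this.type = type;
        this.timeoutMillis = timeoutMillis;
    }

    /** Called when a (replacement) service becomes available; wakes up blocked callers. */
    public synchronized void bind(T service) {
        delegate = service;
        notifyAll();
    }

    /** Called when the current service goes away. */
    public synchronized void unbind() {
        delegate = null;
    }

    /** Returns the dynamic proxy that clients invoke instead of the raw service. */
    @SuppressWarnings("unchecked")
    public T proxy() {
        return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] {type}, this);
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        T service = delegate;
        if (service == null) {
            synchronized (this) {
                long deadline = System.currentTimeMillis() + timeoutMillis;
                while ((service = delegate) == null) {
                    long remaining = deadline - System.currentTimeMillis();
                    if (remaining <= 0) {
                        // mirrors the "service unavailable" failure described in the record above
                        throw new IllegalStateException("Service unavailable: " + type.getName());
                    }
                    wait(remaining);
                }
            }
        }
        return method.invoke(service, args);
    }
}

A caller would work against proxy() rather than the raw service, for example MyService s = new BlockingServiceProxy<>(MyService.class, 15000).proxy(); invocations on s then block for at most 15 seconds while the backing service is being swapped.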
Java
apache-2.0
4b64a065fa9e3bb7af6fc6d7497d3b47599fe1db
0
sunjincheng121/flink,kl0u/flink,haohui/flink,xccui/flink,gustavoanatoly/flink,fanzhidongyzby/flink,zimmermatt/flink,godfreyhe/flink,mtunique/flink,hequn8128/flink,wwjiang007/flink,gyfora/flink,zhangminglei/flink,StephanEwen/incubator-flink,rmetzger/flink,bowenli86/flink,ueshin/apache-flink,zhangminglei/flink,hwstreaming/flink,DieBauer/flink,hwstreaming/flink,clarkyzl/flink,aljoscha/flink,greghogan/flink,Xpray/flink,xccui/flink,mylog00/flink,haohui/flink,godfreyhe/flink,jinglining/flink,greghogan/flink,twalthr/flink,kl0u/flink,PangZhi/flink,gyfora/flink,tzulitai/flink,mbode/flink,godfreyhe/flink,twalthr/flink,bowenli86/flink,GJL/flink,shaoxuan-wang/flink,hequn8128/flink,WangTaoTheTonic/flink,zhangminglei/flink,Xpray/flink,bowenli86/flink,hongyuhong/flink,sunjincheng121/flink,tillrohrmann/flink,PangZhi/flink,mylog00/flink,aljoscha/flink,WangTaoTheTonic/flink,zohar-mizrahi/flink,sunjincheng121/flink,fanyon/flink,WangTaoTheTonic/flink,aljoscha/flink,bowenli86/flink,StephanEwen/incubator-flink,jinglining/flink,zohar-mizrahi/flink,aljoscha/flink,mbode/flink,tzulitai/flink,lincoln-lil/flink,fanzhidongyzby/flink,darionyaphet/flink,greghogan/flink,yew1eb/flink,twalthr/flink,kl0u/flink,tony810430/flink,bowenli86/flink,hequn8128/flink,zohar-mizrahi/flink,shaoxuan-wang/flink,tony810430/flink,gustavoanatoly/flink,xccui/flink,Xpray/flink,zjureel/flink,darionyaphet/flink,rmetzger/flink,zjureel/flink,wwjiang007/flink,DieBauer/flink,godfreyhe/flink,fanzhidongyzby/flink,kaibozhou/flink,StephanEwen/incubator-flink,gyfora/flink,PangZhi/flink,hongyuhong/flink,shaoxuan-wang/flink,lincoln-lil/flink,mylog00/flink,twalthr/flink,zimmermatt/flink,fanyon/flink,GJL/flink,sunjincheng121/flink,kl0u/flink,tony810430/flink,lincoln-lil/flink,shaoxuan-wang/flink,DieBauer/flink,kaibozhou/flink,gyfora/flink,zhangminglei/flink,tony810430/flink,tillrohrmann/flink,hongyuhong/flink,zentol/flink,aljoscha/flink,DieBauer/flink,ueshin/apache-flink,greghogan/flink,fhueske/flink,shaoxuan-wang/flink,gustavoanatoly/flink,tillrohrmann/flink,bowenli86/flink,zjureel/flink,tillrohrmann/flink,xccui/flink,zohar-mizrahi/flink,xccui/flink,hongyuhong/flink,wwjiang007/flink,GJL/flink,zjureel/flink,mylog00/flink,sunjincheng121/flink,zentol/flink,zimmermatt/flink,zimmermatt/flink,tzulitai/flink,apache/flink,sunjincheng121/flink,Xpray/flink,mtunique/flink,twalthr/flink,aljoscha/flink,shaoxuan-wang/flink,zentol/flink,mbode/flink,rmetzger/flink,fhueske/flink,apache/flink,tzulitai/flink,kl0u/flink,xccui/flink,mylog00/flink,apache/flink,gyfora/flink,kaibozhou/flink,godfreyhe/flink,gyfora/flink,hwstreaming/flink,gustavoanatoly/flink,mtunique/flink,WangTaoTheTonic/flink,kaibozhou/flink,fanyon/flink,zentol/flink,tony810430/flink,tzulitai/flink,zentol/flink,rmetzger/flink,yew1eb/flink,zentol/flink,GJL/flink,lincoln-lil/flink,jinglining/flink,ueshin/apache-flink,GJL/flink,zohar-mizrahi/flink,StephanEwen/incubator-flink,lincoln-lil/flink,gustavoanatoly/flink,tillrohrmann/flink,hequn8128/flink,fanzhidongyzby/flink,fhueske/flink,tillrohrmann/flink,greghogan/flink,apache/flink,ueshin/apache-flink,clarkyzl/flink,tony810430/flink,fhueske/flink,mbode/flink,xccui/flink,zjureel/flink,haohui/flink,darionyaphet/flink,hequn8128/flink,lincoln-lil/flink,haohui/flink,godfreyhe/flink,twalthr/flink,tillrohrmann/flink,kl0u/flink,hwstreaming/flink,zentol/flink,WangTaoTheTonic/flink,darionyaphet/flink,ueshin/apache-flink,apache/flink,lincoln-lil/flink,mbode/flink,zimmermatt/flink,yew1eb/flink,twalthr/flink,haohui/flink,kaibozhou/flink,wwjiang007/flink,PangZhi/flink,fhueske/flink,
gyfora/flink,StephanEwen/incubator-flink,zjureel/flink,rmetzger/flink,wwjiang007/flink,yew1eb/flink,StephanEwen/incubator-flink,GJL/flink,fanyon/flink,tzulitai/flink,Xpray/flink,rmetzger/flink,darionyaphet/flink,kaibozhou/flink,clarkyzl/flink,jinglining/flink,yew1eb/flink,clarkyzl/flink,DieBauer/flink,tony810430/flink,apache/flink,jinglining/flink,hongyuhong/flink,wwjiang007/flink,mtunique/flink,zhangminglei/flink,greghogan/flink,fanzhidongyzby/flink,zjureel/flink,clarkyzl/flink,jinglining/flink,fanyon/flink,hequn8128/flink,godfreyhe/flink,hwstreaming/flink,mtunique/flink,apache/flink,rmetzger/flink,PangZhi/flink,wwjiang007/flink,fhueske/flink
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.*; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.Mockito.times; import static org.mockito.Matchers.any; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.execution.librarycache.LibraryCacheManager; import org.apache.flink.runtime.instance.AllocatedSlot; import org.apache.flink.runtime.instance.Instance; import org.apache.flink.runtime.jobgraph.JobID; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.protocols.TaskOperationProtocol; import org.apache.flink.runtime.taskmanager.TaskOperationResult; import org.junit.Test; import org.mockito.Matchers; public class ExecutionVertexDeploymentTest { @Test public void testDeployCall() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexNotExecuting(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} assertNull(vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployWithSynchronousAnswer() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingSynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); 
LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.RUNNING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} verify(taskManager).submitTask(Matchers.any(TaskDeploymentDescriptor.class)); assertNull(vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployWithAsynchronousAnswer() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingAsynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} // wait until the state transition must be done for (int i = 0; i < 100; i++) { if (vertex.getExecutionState() != ExecutionState.RUNNING) { Thread.sleep(10); } } assertEquals(ExecutionState.RUNNING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} verify(taskManager).submitTask(Matchers.any(TaskDeploymentDescriptor.class)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployFailedSynchronous() { final String ERROR_MESSAGE = "test_failure_error_message"; try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingSynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), false, ERROR_MESSAGE)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertNotNull(vertex.getFailureCause()); assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); 
assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployFailedAsynchronously() { final String ERROR_MESSAGE = "test_failure_error_message"; try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingAsynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), false, ERROR_MESSAGE)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); // wait until the state transition must be done for (int i = 0; i < 100; i++) { if (vertex.getExecutionState() == ExecutionState.FAILED && vertex.getFailureCause() != null) { break; } else { Thread.sleep(10); } } assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertNotNull(vertex.getFailureCause()); assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testFailExternallyDuringDeploy() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexNotExecuting(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); Exception testError = new Exception("test error"); vertex.fail(testError); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertEquals(testError, vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testFailCallOvertakesDeploymentAnswer() { try { final ActionQueue queue = new ActionQueue(); final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingTriggered(jid, queue); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); final ExecutionAttemptID eid = vertex.getCurrentExecutionAttempt().getAttemptId(); // the deployment call succeeds regularly 
when(taskManager.submitTask(any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(eid, true)); // first cancel call does not find a task, second one finds it when(taskManager.cancelTask(any(ExecutionAttemptID.class))).thenReturn( new TaskOperationResult(eid, false), new TaskOperationResult(eid, true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); Exception testError = new Exception("test error"); vertex.fail(testError); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); // now the deploy call returns Runnable deploy = queue.popNextAction(); Runnable cancel1 = queue.popNextAction(); // cancel call overtakes cancel1.run(); deploy.run(); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); // should have sent another cancel call queue.triggerNextAction(); assertEquals(testError, vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); // should have received two cancel calls verify(taskManager, times(2)).cancelTask(eid); verify(taskManager, times(1)).submitTask(any(TaskDeploymentDescriptor.class)); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } }
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ExecutionVertexDeploymentTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; import static org.apache.flink.runtime.executiongraph.ExecutionGraphTestUtils.*; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.Mockito.times; import static org.mockito.Matchers.any; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.execution.librarycache.LibraryCacheManager; import org.apache.flink.runtime.instance.AllocatedSlot; import org.apache.flink.runtime.instance.Instance; import org.apache.flink.runtime.jobgraph.JobID; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.protocols.TaskOperationProtocol; import org.apache.flink.runtime.taskmanager.TaskOperationResult; import org.junit.Test; import org.mockito.Matchers; public class ExecutionVertexDeploymentTest { @Test public void testDeployCall() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexNotExecuting(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} assertNull(vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployWithSynchronousAnswer() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingSynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); 
LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.RUNNING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} verify(taskManager).submitTask(Matchers.any(TaskDeploymentDescriptor.class)); assertNull(vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployWithAsynchronousAnswer() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingAsynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} // wait until the state transition must be done for (int i = 0; i < 100; i++) { if (vertex.getExecutionState() != ExecutionState.RUNNING) { Thread.sleep(10); } } assertEquals(ExecutionState.RUNNING, vertex.getExecutionState()); // no repeated scheduling try { vertex.deployToSlot(slot); fail("Scheduled from wrong state"); } catch (IllegalStateException e) {} verify(taskManager).submitTask(Matchers.any(TaskDeploymentDescriptor.class)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.RUNNING) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployFailedSynchronous() { final String ERROR_MESSAGE = "test_failure_error_message"; try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingSynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), false, ERROR_MESSAGE)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertNotNull(vertex.getFailureCause()); assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); 
assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testDeployFailedAsynchronously() { final String ERROR_MESSAGE = "test_failure_error_message"; try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call final TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingAsynchronously(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); when(taskManager.submitTask(Matchers.any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(vertex.getCurrentExecutionAttempt().getAttemptId(), false, ERROR_MESSAGE)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); // wait until the state transition must be done for (int i = 0; i < 100; i++) { if (vertex.getExecutionState() != ExecutionState.FAILED) { Thread.sleep(10); } } assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertNotNull(vertex.getFailureCause()); assertTrue(vertex.getFailureCause().getMessage().contains(ERROR_MESSAGE)); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testFailExternallyDuringDeploy() { try { final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexNotExecuting(jid); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); Exception testError = new Exception("test error"); vertex.fail(testError); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); assertEquals(testError, vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } @Test public void testFailCallOvertakesDeploymentAnswer() { try { final ActionQueue queue = new ActionQueue(); final JobVertexID jid = new JobVertexID(); // mock taskmanager to simply accept the call TaskOperationProtocol taskManager = mock(TaskOperationProtocol.class); final Instance instance = getInstance(taskManager); final AllocatedSlot slot = instance.allocateSlot(new JobID()); final ExecutionJobVertex ejv = getJobVertexExecutingTriggered(jid, queue); final ExecutionVertex vertex = new ExecutionVertex(ejv, 0, new IntermediateResult[0]); final ExecutionAttemptID eid = vertex.getCurrentExecutionAttempt().getAttemptId(); // the deployment call succeeds regularly when(taskManager.submitTask(any(TaskDeploymentDescriptor.class))).thenReturn(new TaskOperationResult(eid, true)); // first 
cancel call does not find a task, second one finds it when(taskManager.cancelTask(any(ExecutionAttemptID.class))).thenReturn( new TaskOperationResult(eid, false), new TaskOperationResult(eid, true)); assertEquals(ExecutionState.CREATED, vertex.getExecutionState()); LibraryCacheManager.register(vertex.getJobId(), new String[0]); vertex.deployToSlot(slot); assertEquals(ExecutionState.DEPLOYING, vertex.getExecutionState()); Exception testError = new Exception("test error"); vertex.fail(testError); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); // now the deploy call returns Runnable deploy = queue.popNextAction(); Runnable cancel1 = queue.popNextAction(); // cancel call overtakes cancel1.run(); deploy.run(); assertEquals(ExecutionState.FAILED, vertex.getExecutionState()); // should have sent another cancel call queue.triggerNextAction(); assertEquals(testError, vertex.getFailureCause()); assertTrue(vertex.getStateTimestamp(ExecutionState.CREATED) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.DEPLOYING) > 0); assertTrue(vertex.getStateTimestamp(ExecutionState.FAILED) > 0); // should have received two cancel calls verify(taskManager, times(2)).cancelTask(eid); verify(taskManager, times(1)).submitTask(any(TaskDeploymentDescriptor.class)); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } } }
Improved robustness of ExecutionGraphDeploymentTest
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ExecutionVertexDeploymentTest.java
Improved robustness of ExecutionGraphDeploymentTest
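Both versions of ExecutionVertexDeploymentTest above wait for the asynchronous state transition with a hand-rolled loop that sleeps and re-checks up to 100 times. A small helper along the following lines captures that idea; it is only an illustration and is not part of the Flink test utilities.

import java.util.function.BooleanSupplier;

public final class PollUntil {

    private PollUntil() {
    }

    /**
     * Re-evaluates the condition every intervalMillis until it holds or timeoutMillis
     * elapses; returns whether the condition was eventually met.
     */
    public static boolean poll(BooleanSupplier condition, long timeoutMillis, long intervalMillis)
            throws InterruptedException {
        final long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!condition.getAsBoolean()) {
            if (System.currentTimeMillis() >= deadline) {
                return false;
            }
            Thread.sleep(intervalMillis);
        }
        return true;
    }
}

A test could then replace the counting loop with something like assertTrue(PollUntil.poll(() -> vertex.getExecutionState() == ExecutionState.RUNNING, 1000, 10)).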
Java
apache-2.0
ca4b84a9e6d0e82348c6149aad6f831b2ede5d76
0
matrix-org/matrix-android-sdk,matrix-org/matrix-android-sdk,matrix-org/matrix-android-sdk
/* * Copyright 2014 OpenMarket Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.matrix.androidsdk.util; import android.text.TextUtils; import org.matrix.androidsdk.MXDataHandler; import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.MyUser; import org.matrix.androidsdk.data.Room; import org.matrix.androidsdk.listeners.IMXNetworkEventListener; import org.matrix.androidsdk.network.NetworkConnectivityReceiver; import org.matrix.androidsdk.rest.callback.ApiCallback; import org.matrix.androidsdk.rest.callback.SimpleApiCallback; import org.matrix.androidsdk.rest.client.BingRulesRestClient; import org.matrix.androidsdk.rest.model.Event; import org.matrix.androidsdk.rest.model.MatrixError; import org.matrix.androidsdk.rest.model.Message; import org.matrix.androidsdk.rest.model.bingrules.BingRule; import org.matrix.androidsdk.rest.model.bingrules.BingRuleSet; import org.matrix.androidsdk.rest.model.bingrules.BingRulesResponse; import org.matrix.androidsdk.rest.model.bingrules.Condition; import org.matrix.androidsdk.rest.model.bingrules.ContainsDisplayNameCondition; import org.matrix.androidsdk.rest.model.bingrules.ContentRule; import org.matrix.androidsdk.rest.model.bingrules.EventMatchCondition; import org.matrix.androidsdk.rest.model.bingrules.RoomMemberCountCondition; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; /** * Object that gets and processes bing rules from the server. */ public class BingRulesManager { private static final String LOG_TAG = "BingRulesManager"; /** * Bing rule listener */ public interface onBingRuleUpdateListener { /** * The manager succeeds to update the bingrule enable status. */ void onBingRuleUpdateSuccess(); /** * The manager fails to update the bingrule enable status. * @param errorMessage the error message. 
*/ void onBingRuleUpdateFailure(String errorMessage); } /** * Bing rules update */ public interface onBingRulesUpdateListener { /** * Warn that some bing rules have been updated */ void onBingRulesUpdate(); } // general members private final BingRulesRestClient mApiClient; private final MXSession mSession; private final String mMyUserId; private final MXDataHandler mDataHandler; // the rules set to apply private BingRuleSet mRulesSet = new BingRuleSet(); // the rules list private final List<BingRule> mRules = new ArrayList<>(); // the default bing rule private BingRule mDefaultBingRule = new BingRule(true); // tell if the bing rules set is initialized private boolean mIsInitialized = false; // map to check if a room is "mention only" private final Map<String, Boolean> mIsMentionOnlyMap = new HashMap<>(); // network management private NetworkConnectivityReceiver mNetworkConnectivityReceiver; private IMXNetworkEventListener mNetworkListener; private ApiCallback<Void> mLoadRulesCallback; // listener private final Set<onBingRulesUpdateListener> mBingRulesUpdateListeners = new HashSet<>(); /** * Constructor * @param session the session * @param networkConnectivityReceiver the network events listener */ public BingRulesManager(MXSession session, NetworkConnectivityReceiver networkConnectivityReceiver) { mSession = session; mApiClient = session.getBingRulesApiClient(); mMyUserId = session.getCredentials().userId; mDataHandler = session.getDataHandler(); mNetworkListener = new IMXNetworkEventListener() { @Override public void onNetworkConnectionUpdate(boolean isConnected) { // mLoadRulesCallback is set when a loadRules failed // so when a network is available, trigger again loadRules if (isConnected && (null != mLoadRulesCallback)) { loadRules(mLoadRulesCallback); } } }; mNetworkConnectivityReceiver = networkConnectivityReceiver; networkConnectivityReceiver.addEventListener(mNetworkListener); } /** * @return true if it is ready to be used (i.e initializedà */ public boolean isReady() { return mIsInitialized; } /** * Remove the network events listener. * This listener is only used to initialize the rules at application launch. */ private void removeNetworkListener() { if ((null != mNetworkConnectivityReceiver) && (null != mNetworkListener)) { mNetworkConnectivityReceiver.removeEventListener(mNetworkListener); mNetworkConnectivityReceiver = null; mNetworkListener = null; } } /** * Add a listener * @param listener the listener */ public void addBingRulesUpdateListener(onBingRulesUpdateListener listener) { if (null != listener) { mBingRulesUpdateListeners.add(listener); } } /** * remove a listener * @param listener the listener */ public void removeBingRulesUpdateListener(onBingRulesUpdateListener listener) { if (null != listener) { mBingRulesUpdateListeners.remove(listener); } } /** * Some rules have been updated. */ private void onBingRulesUpdate() { for(onBingRulesUpdateListener listener : mBingRulesUpdateListeners) { try { listener.onBingRulesUpdate(); } catch (Exception e) { Log.e(LOG_TAG, "## onBingRulesUpdate() : onBingRulesUpdate failed " + e.getMessage()); } } } /** * Load the bing rules from the server. 
* @param callback an async callback called when the rules are loaded */ public void loadRules(final ApiCallback<Void> callback) { mLoadRulesCallback = null; Log.d(LOG_TAG, "## loadRules() : refresh the bing rules"); mApiClient.getAllBingRules(new ApiCallback<BingRulesResponse>() { @Override public void onSuccess(BingRulesResponse info) { Log.d(LOG_TAG, "## loadRules() : succeeds"); buildRules(info); mIsInitialized = true; if (callback != null) { callback.onSuccess(null); } removeNetworkListener(); } private void onError(String errorMessage) { Log.e(LOG_TAG, "## loadRules() : failed " + errorMessage); // the callback will be called when the request will succeed mLoadRulesCallback = callback; } @Override public void onNetworkError(Exception e) { onError(e.getMessage()); } @Override public void onMatrixError(MatrixError e) { onError(e.getMessage()); } @Override public void onUnexpectedError(Exception e) { onError(e.getMessage()); } }); } /** * Update the rule enable status. * @param kind the rule kind. * @param ruleId the rule ID. * @param status the new enable status. * @param callback an async callback. */ public void updateEnableRuleStatus(String kind, String ruleId, boolean status, final ApiCallback<Void> callback) { mApiClient.updateEnableRuleStatus(kind, ruleId, status, callback); } /** * Returns whether a string contains an occurrence of another, as a standalone word, regardless of case. * @param subString the string to search for * @param longString the string to search in * @return whether a match was found */ private static boolean caseInsensitiveFind(String subString, String longString) { // sanity check if (TextUtils.isEmpty(subString) || TextUtils.isEmpty(longString)) { return false; } boolean found = false; try { Pattern pattern = Pattern.compile("(\\W|^)" + subString + "(\\W|$)", Pattern.CASE_INSENSITIVE); found = pattern.matcher(longString).find(); } catch (Exception e) { Log.e(LOG_TAG, "caseInsensitiveFind : pattern.matcher failed with " + e.getMessage()); } return found; } /** * Returns the first highlighted notifiable bing rule which fulfills its condition with this event. * @param event the event * @return the first matched bing rule, null if none */ public BingRule fulfilledHighlightBingRule(Event event) { return fulfilledBingRule(event, true); } /** * Returns the first notifiable bing rule which fulfills its condition with this event. * @param event the event * @return the first matched bing rule, null if none */ public BingRule fulfilledBingRule(Event event) { return fulfilledBingRule(event, false); } /** * Returns the first notifiable bing rule which fulfills its condition with this event. 
* @param event the event * @param highlightRuleOnly true to only check the highlight rule * @return the first matched bing rule, null if none */ private BingRule fulfilledBingRule(Event event, boolean highlightRuleOnly) { // sanity check if (null == event) { Log.e(LOG_TAG, "## fulfilledBingRule() : null event"); return null; } if (!mIsInitialized) { Log.e(LOG_TAG, "## fulfilledBingRule() : not initialized"); return null; } if (0 == mRules.size()) { Log.e(LOG_TAG, "## fulfilledBingRule() : no rules"); return null; } // do not trigger notification for oneself messages if ((null != event.getSender()) && TextUtils.equals(event.getSender(), mMyUserId)) { return null; } String eventType = event.getType(); // some types are not bingable if (TextUtils.equals(eventType, Event.EVENT_TYPE_PRESENCE) || TextUtils.equals(eventType, Event.EVENT_TYPE_TYPING) || TextUtils.equals(eventType, Event.EVENT_TYPE_REDACTION) || TextUtils.equals(eventType, Event.EVENT_TYPE_RECEIPT) || TextUtils.equals(eventType, Event.EVENT_TYPE_TAGS)) { return null; } // GA issue final ArrayList<BingRule> rules; synchronized (this) { rules = new ArrayList<>(mRules); } // Go down the rule list until we find a match for (BingRule bingRule : rules) { if (bingRule.isEnabled && (!highlightRuleOnly || bingRule.shouldHighlight())) { boolean isFullfilled = false; // some rules have no condition // so their ruleId defines the method if (BingRule.RULE_ID_CONTAIN_USER_NAME.equals(bingRule.ruleId) || BingRule.RULE_ID_CONTAIN_DISPLAY_NAME.equals(bingRule.ruleId)) { if (Event.EVENT_TYPE_MESSAGE.equals(event.getType())) { Message message = JsonUtils.toMessage(event.getContent()); MyUser myUser = mSession.getMyUser(); String pattern = null; if (BingRule.RULE_ID_CONTAIN_USER_NAME.equals(bingRule.ruleId)) { if (mMyUserId.indexOf(":") >= 0) { pattern = mMyUserId.substring(1, mMyUserId.indexOf(":")); } else { pattern = mMyUserId; } } else if (BingRule.RULE_ID_CONTAIN_DISPLAY_NAME.equals(bingRule.ruleId)) { pattern = myUser.displayname; if ((null != mSession.getDataHandler()) && (null != mSession.getDataHandler().getStore())) { Room room = mSession.getDataHandler().getStore().getRoom(event.roomId); if ((null != room) && (null != room.getLiveState())) { String disambiguousedName = room.getLiveState().getMemberName(mMyUserId); if (!TextUtils.equals(disambiguousedName, mMyUserId)) { pattern = Pattern.quote(disambiguousedName); } } } } if (!TextUtils.isEmpty(pattern)) { isFullfilled = caseInsensitiveFind(pattern, message.body); } } } else if (BingRule.RULE_ID_FALLBACK.equals(bingRule.ruleId)) { isFullfilled = true; } else { // some default rules define conditions // so use them instead of doing a custom treatment // RULE_ID_ONE_TO_ONE_ROOM // RULE_ID_SUPPRESS_BOTS_NOTIFICATIONS isFullfilled = eventMatchesConditions(event, bingRule.conditions); } if (isFullfilled) { return bingRule; } } } // no rules are fulfilled return null; } /** * Check if an event matches a conditions set * @param event the evnt to test * @param conditions the conditions set * @return true if the event matches all the conditions set. 
*/ private boolean eventMatchesConditions(Event event, List<Condition> conditions) { try { if ((conditions != null) && (event != null)) { for (Condition condition : conditions) { if (condition instanceof EventMatchCondition) { if (!((EventMatchCondition) condition).isSatisfied(event)) { return false; } } else if (condition instanceof ContainsDisplayNameCondition) { if (event.roomId != null) { Room room = mDataHandler.getRoom(event.roomId, false); // sanity checks if ((null != room) && (null != room.getMember(mMyUserId))) { // Best way to get your display name for now String myDisplayName = room.getMember(mMyUserId).displayname; if (!((ContainsDisplayNameCondition) condition).isSatisfied(event, myDisplayName)) { return false; } } } } else if (condition instanceof RoomMemberCountCondition) { if (event.roomId != null) { Room room = mDataHandler.getRoom(event.roomId, false); if (!((RoomMemberCountCondition) condition).isSatisfied(room)) { return false; } } } // FIXME: Handle device rules } } } catch (Exception e) { Log.e(LOG_TAG, "## eventMatchesConditions() failed " + e.getMessage()); return false; } return true; } /** * Build the internal build rules * @param bingRulesResponse the server request response. */ public void buildRules(BingRulesResponse bingRulesResponse) { if (null != bingRulesResponse) { updateRules(bingRulesResponse.global); onBingRulesUpdate(); } } /** * @return the rules set */ public BingRuleSet pushRules() { return mRulesSet; } /** * Update mRulesSet with the new one. * @param ruleSet the new ruleSet to apply */ private void updateRules(BingRuleSet ruleSet) { synchronized (this) { // clear the rules list // it is mRules.clear(); // sanity check if (null == ruleSet) { mRulesSet = new BingRuleSet(); return; } // Replace the list by ArrayList to be able to add/remove rules // Add the rule kind in each rule // Ensure that the null pointers are replaced by an empty list if (ruleSet.override != null) { ruleSet.override = new ArrayList<>(ruleSet.override); for (BingRule rule : ruleSet.override) { rule.kind = BingRule.KIND_OVERRIDE; } mRules.addAll(ruleSet.override); } else { ruleSet.override = new ArrayList<>(ruleSet.override); } if (ruleSet.content != null) { ruleSet.content = new ArrayList<>(ruleSet.content); for (BingRule rule : ruleSet.content) { rule.kind = BingRule.KIND_CONTENT; } addContentRules(ruleSet.content); } else { ruleSet.content = new ArrayList<>(); } mIsMentionOnlyMap.clear(); if (ruleSet.room != null) { ruleSet.room = new ArrayList<>(ruleSet.room); for (BingRule rule : ruleSet.room) { rule.kind = BingRule.KIND_ROOM; } addRoomRules(ruleSet.room); } else { ruleSet.room = new ArrayList<>(); } if (ruleSet.sender != null) { ruleSet.sender = new ArrayList<>(ruleSet.sender); for (BingRule rule : ruleSet.sender) { rule.kind = BingRule.KIND_SENDER; } addSenderRules(ruleSet.sender); } else { ruleSet.sender = new ArrayList<>(); } if (ruleSet.underride != null) { ruleSet.underride = new ArrayList<>(ruleSet.underride); for (BingRule rule : ruleSet.underride) { rule.kind = BingRule.KIND_UNDERRIDE; } mRules.addAll(ruleSet.underride); } else { ruleSet.underride = new ArrayList<>(); } mRulesSet = ruleSet; Log.d(LOG_TAG, "## updateRules() : has " + mRules.size() + " rules"); } } /** * Create a content EventMatchConditions list from a ContentRules list * @param rules the ContentRules list */ private void addContentRules(List<ContentRule> rules) { // sanity check if (null != rules) { for (ContentRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); 
condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "content.body"; condition.pattern = rule.pattern; rule.addCondition(condition); mRules.add(rule); } } } /** * Create a room EventMatchConditions list from a BingRule list * @param rules the BingRule list */ private void addRoomRules(List<BingRule> rules) { if (null != rules) { for (BingRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "room_id"; condition.pattern = rule.ruleId; rule.addCondition(condition); mRules.add(rule); } } } /** * Create a sender EventMatchConditions list from a BingRule list * @param rules the BingRule list */ private void addSenderRules(List<BingRule> rules) { if (null != rules) { for (BingRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "user_id"; condition.pattern = rule.ruleId; rule.addCondition(condition); mRules.add(rule); } } } /** * Toogle a rule. * @param rule the bing rule to toggle. * @param listener the rule update listener. * @return the matched bing rule or null it doesn't exist. */ public BingRule toggleRule(final BingRule rule, final onBingRuleUpdateListener listener) { if (null != rule) { updateEnableRuleStatus(rule.kind, rule.ruleId, !rule.isEnabled, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { rule.isEnabled = !rule.isEnabled; updateRules(mRulesSet); onBingRulesUpdate(); if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## toggleRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## onError : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. * @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. * @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } return rule; } /** * Delete the rule. * @param rule the rule to delete. * @param listener the rule update listener. */ public void deleteRule(final BingRule rule, final onBingRuleUpdateListener listener) { // null case if (null == rule) { if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } mApiClient.deleteRule(rule.kind, rule.ruleId, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { if (null != mRulesSet) { mRulesSet.remove(rule); updateRules(mRulesSet); onBingRulesUpdate(); } if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## onError : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. 
* @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. * @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } /** * Delete a rules list. * @param rules the rules to delete * @param listener the listener when the rules are deleted */ public void deleteRules(final List<BingRule> rules, final onBingRuleUpdateListener listener) { deleteRules(rules, 0, listener); } /** * Recursive rules deletion method. * @param rules the rules to delete * @param index the rule index * @param listener the listener when the rules are deleted */ private void deleteRules(final List<BingRule> rules, final int index, final onBingRuleUpdateListener listener) { // sanity checks if ((null == rules) || (index >= rules.size())) { onBingRulesUpdate(); if (null != listener) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRules() : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } // delete the rule deleteRule(rules.get(index), new onBingRuleUpdateListener() { @Override public void onBingRuleUpdateSuccess() { deleteRules(rules, index+1, listener); } @Override public void onBingRuleUpdateFailure(String errorMessage) { if (null != listener) { try { listener.onBingRuleUpdateFailure(errorMessage); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRules() : onBingRuleUpdateFailure failed " + e.getMessage()); } } } }); } /** * Add a rule. * @param rule the rule to delete. * @param listener the rule update listener. */ public void addRule(final BingRule rule, final onBingRuleUpdateListener listener) { // null case if (null == rule) { if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } mApiClient.addRule(rule, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { if (null != mRulesSet) { mRulesSet.addAtTop(rule); updateRules(mRulesSet); onBingRulesUpdate(); } if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. * @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. 
* @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } /** * Search the push rules for the room id * @param roomId the room id * @return the room rules list */ public List<BingRule> getPushRulesForRoomId(String roomId) { ArrayList<BingRule> rules = new ArrayList<>(); // sanity checks if (!TextUtils.isEmpty(roomId) && (null != mRulesSet)) { // the webclient defines two ways to set a room rule // mention only : the user won't have any push for the room except if a content rule is fullfilled // mute : no notification for this room // mute rules are defined in override groups if (null != mRulesSet.override) { for (BingRule roomRule : mRulesSet.override) { if (TextUtils.equals(roomRule.ruleId, roomId)) { rules.add(roomRule); } } } // mention only are defined in room group if (null != mRulesSet.room) { for (BingRule roomRule : mRulesSet.room) { if (TextUtils.equals(roomRule.ruleId, roomId)) { rules.add(roomRule); } } } } return rules; } /** * Tell whether the regular notifications are disabled for the room. * @param roomId the room id * @return true if the regular notifications are disabled (mention only) */ public boolean isRoomMentionOnly(String roomId) { // sanity check if (!TextUtils.isEmpty(roomId)) { if (mIsMentionOnlyMap.containsKey(roomId)) { return mIsMentionOnlyMap.get(roomId); } if (null != mRulesSet.room) { for (BingRule roomRule : mRulesSet.room) { if (TextUtils.equals(roomRule.ruleId, roomId)) { List<BingRule> roomRules = getPushRulesForRoomId(roomId); if (0 != roomRules.size()) { for (BingRule rule : roomRules) { if (rule.shouldNotNotify()) { mIsMentionOnlyMap.put(roomId, rule.isEnabled); return rule.isEnabled; } } } } } } mIsMentionOnlyMap.put(roomId, false); } return false; } /** * Test if the room has a dedicated rule which disables notification. * @param roomId the roomId * @return true if there is a rule to disable notifications. */ public boolean isRoomNotificationsDisabled(String roomId) { List<BingRule> roomRules = getPushRulesForRoomId(roomId); if (0 != roomRules.size()) { for(BingRule rule : roomRules) { if (!rule.shouldNotify() && rule.isEnabled) { return true; } } } return false; } /** * Mute / unmute the room notifications. * Only the room rules are checked. * * @param roomId the room id to mute / unmute. * @param isMuted set to true to mute the notification * @param listener the listener. */ public void muteRoomNotifications(final String roomId, final boolean isMuted, final onBingRuleUpdateListener listener) { List<BingRule> bingRules = getPushRulesForRoomId(roomId); // the mobile client only supports to define a "mention only" rule i.e a rule defined in the room rules set. // delete the rule and create a new one deleteRules(bingRules, new onBingRuleUpdateListener() { @Override public void onBingRuleUpdateSuccess() { if (isMuted) { addRule(new BingRule(BingRule.KIND_ROOM, roomId, false, false, false), listener); } else if (null != listener) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## muteRoomNotifications() : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } @Override public void onBingRuleUpdateFailure(String errorMessage) { if (null != listener) { try { listener.onBingRuleUpdateFailure(errorMessage); } catch (Exception e) { Log.e(LOG_TAG, "## muteRoomNotifications() : onBingRuleUpdateFailure failed " + e.getMessage()); } } } }); } }
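As an aside on caseInsensitiveFind in the class above: it brackets the search term with non-word characters or string boundaries to get a standalone-word, case-insensitive match. The sketch below shows that approach in isolation; unlike the method above it also wraps the term in Pattern.quote, which is an added assumption here to stop regex metacharacters in the term from being interpreted.

import java.util.regex.Pattern;

public class WordMatchSketch {

    // returns true when 'word' occurs in 'text' as a standalone word, ignoring case
    static boolean containsWord(String word, String text) {
        if (word == null || word.isEmpty() || text == null || text.isEmpty()) {
            return false;
        }
        Pattern p = Pattern.compile("(\\W|^)" + Pattern.quote(word) + "(\\W|$)",
                Pattern.CASE_INSENSITIVE);
        return p.matcher(text).find();
    }

    public static void main(String[] args) {
        System.out.println(containsWord("alice", "Hey Alice, ping!"));    // true
        System.out.println(containsWord("alice", "malice aforethought")); // false: not a standalone word
    }
}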
matrix-sdk/src/main/java/org/matrix/androidsdk/util/BingRulesManager.java
/* * Copyright 2014 OpenMarket Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.matrix.androidsdk.util; import android.text.TextUtils; import org.matrix.androidsdk.MXDataHandler; import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.MyUser; import org.matrix.androidsdk.data.Room; import org.matrix.androidsdk.listeners.IMXNetworkEventListener; import org.matrix.androidsdk.network.NetworkConnectivityReceiver; import org.matrix.androidsdk.rest.callback.ApiCallback; import org.matrix.androidsdk.rest.callback.SimpleApiCallback; import org.matrix.androidsdk.rest.client.BingRulesRestClient; import org.matrix.androidsdk.rest.model.Event; import org.matrix.androidsdk.rest.model.MatrixError; import org.matrix.androidsdk.rest.model.Message; import org.matrix.androidsdk.rest.model.bingrules.BingRule; import org.matrix.androidsdk.rest.model.bingrules.BingRuleSet; import org.matrix.androidsdk.rest.model.bingrules.BingRulesResponse; import org.matrix.androidsdk.rest.model.bingrules.Condition; import org.matrix.androidsdk.rest.model.bingrules.ContainsDisplayNameCondition; import org.matrix.androidsdk.rest.model.bingrules.ContentRule; import org.matrix.androidsdk.rest.model.bingrules.EventMatchCondition; import org.matrix.androidsdk.rest.model.bingrules.RoomMemberCountCondition; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; /** * Object that gets and processes bing rules from the server. */ public class BingRulesManager { private static final String LOG_TAG = "BingRulesManager"; /** * Bing rule listener */ public interface onBingRuleUpdateListener { /** * The manager succeeds to update the bingrule enable status. */ void onBingRuleUpdateSuccess(); /** * The manager fails to update the bingrule enable status. * @param errorMessage the error message. 
*/ void onBingRuleUpdateFailure(String errorMessage); } /** * Bing rules update */ public interface onBingRulesUpdateListener { /** * Warn that some bing rules have been updated */ void onBingRulesUpdate(); } // general members private final BingRulesRestClient mApiClient; private final MXSession mSession; private final String mMyUserId; private final MXDataHandler mDataHandler; // the rules set to apply private BingRuleSet mRulesSet = new BingRuleSet(); // the rules list private final List<BingRule> mRules = new ArrayList<>(); // the default bing rule private BingRule mDefaultBingRule = new BingRule(true); // tell if the bing rules set is initialized private boolean mIsInitialized = false; // map to check if a room is "mention only" private final Map<String, Boolean> mIsMentionOnlyMap = new HashMap<>(); // network management private NetworkConnectivityReceiver mNetworkConnectivityReceiver; private IMXNetworkEventListener mNetworkListener; private ApiCallback<Void> mLoadRulesCallback; // listener private final Set<onBingRulesUpdateListener> mBingRulesUpdateListeners = new HashSet<>(); /** * Constructor * @param session the session * @param networkConnectivityReceiver the network events listener */ public BingRulesManager(MXSession session, NetworkConnectivityReceiver networkConnectivityReceiver) { mSession = session; mApiClient = session.getBingRulesApiClient(); mMyUserId = session.getCredentials().userId; mDataHandler = session.getDataHandler(); mNetworkListener = new IMXNetworkEventListener() { @Override public void onNetworkConnectionUpdate(boolean isConnected) { // mLoadRulesCallback is set when a loadRules failed // so when a network is available, trigger again loadRules if (isConnected && (null != mLoadRulesCallback)) { loadRules(mLoadRulesCallback); } } }; mNetworkConnectivityReceiver = networkConnectivityReceiver; networkConnectivityReceiver.addEventListener(mNetworkListener); } /** * @return true if it is ready to be used (i.e initializedà */ public boolean isReady() { return mIsInitialized; } /** * Remove the network events listener. * This listener is only used to initialize the rules at application launch. */ private void removeNetworkListener() { if ((null != mNetworkConnectivityReceiver) && (null != mNetworkListener)) { mNetworkConnectivityReceiver.removeEventListener(mNetworkListener); mNetworkConnectivityReceiver = null; mNetworkListener = null; } } /** * Add a listener * @param listener the listener */ public void addBingRulesUpdateListener(onBingRulesUpdateListener listener) { if (null != listener) { mBingRulesUpdateListeners.add(listener); } } /** * remove a listener * @param listener the listener */ public void removeBingRulesUpdateListener(onBingRulesUpdateListener listener) { if (null != listener) { mBingRulesUpdateListeners.remove(listener); } } /** * Some rules have been updated. */ private void onBingRulesUpdate() { for(onBingRulesUpdateListener listener : mBingRulesUpdateListeners) { try { listener.onBingRulesUpdate(); } catch (Exception e) { Log.e(LOG_TAG, "## onBingRulesUpdate() : onBingRulesUpdate failed " + e.getMessage()); } } } /** * Load the bing rules from the server. 
* @param callback an async callback called when the rules are loaded */ public void loadRules(final ApiCallback<Void> callback) { mLoadRulesCallback = null; Log.d(LOG_TAG, "## loadRules() : refresh the bing rules"); mApiClient.getAllBingRules(new ApiCallback<BingRulesResponse>() { @Override public void onSuccess(BingRulesResponse info) { Log.d(LOG_TAG, "## loadRules() : succeeds"); buildRules(info); mIsInitialized = true; if (callback != null) { callback.onSuccess(null); } removeNetworkListener(); } private void onError(String errorMessage) { Log.e(LOG_TAG, "## loadRules() : failed " + errorMessage); // the callback will be called when the request will succeed mLoadRulesCallback = callback; } @Override public void onNetworkError(Exception e) { onError(e.getMessage()); } @Override public void onMatrixError(MatrixError e) { onError(e.getMessage()); } @Override public void onUnexpectedError(Exception e) { onError(e.getMessage()); } }); } /** * Update the rule enable status. * @param kind the rule kind. * @param ruleId the rule ID. * @param status the new enable status. * @param callback an async callback. */ public void updateEnableRuleStatus(String kind, String ruleId, boolean status, final ApiCallback<Void> callback) { mApiClient.updateEnableRuleStatus(kind, ruleId, status, callback); } /** * Returns whether a string contains an occurrence of another, as a standalone word, regardless of case. * @param subString the string to search for * @param longString the string to search in * @return whether a match was found */ private static boolean caseInsensitiveFind(String subString, String longString) { // sanity check if (TextUtils.isEmpty(subString) || TextUtils.isEmpty(longString)) { return false; } boolean found = false; try { Pattern pattern = Pattern.compile("(\\W|^)" + subString + "(\\W|$)", Pattern.CASE_INSENSITIVE); found = pattern.matcher(longString).find(); } catch (Exception e) { Log.e(LOG_TAG, "caseInsensitiveFind : pattern.matcher failed with " + e.getMessage()); } return found; } /** * Returns the first notifiable bing rule which fulfills its condition with this event. 
* @param event the event * @return the first matched bing rule, null if none */ public BingRule fulfilledBingRule(Event event) { // sanity check if (null == event) { Log.e(LOG_TAG, "## fulfilledBingRule() : null event"); return null; } if (!mIsInitialized) { Log.e(LOG_TAG, "## fulfilledBingRule() : not initialized"); return null; } if (0 == mRules.size()) { Log.e(LOG_TAG, "## fulfilledBingRule() : no rules"); return null; } // do not trigger notification for oneself messages if ((null != event.getSender()) && TextUtils.equals(event.getSender(), mMyUserId)) { return null; } String eventType = event.getType(); // some types are not bingable if (TextUtils.equals(eventType, Event.EVENT_TYPE_PRESENCE) || TextUtils.equals(eventType, Event.EVENT_TYPE_TYPING) || TextUtils.equals(eventType, Event.EVENT_TYPE_REDACTION) || TextUtils.equals(eventType, Event.EVENT_TYPE_RECEIPT) || TextUtils.equals(eventType, Event.EVENT_TYPE_TAGS)) { return null; } // GA issue final ArrayList<BingRule> rules; synchronized (this) { rules = new ArrayList<>(mRules); } // Go down the rule list until we find a match for (BingRule bingRule : rules) { if (bingRule.isEnabled) { boolean isFullfilled = false; // some rules have no condition // so their ruleId defines the method if (BingRule.RULE_ID_CONTAIN_USER_NAME.equals(bingRule.ruleId) || BingRule.RULE_ID_CONTAIN_DISPLAY_NAME.equals(bingRule.ruleId)) { if (Event.EVENT_TYPE_MESSAGE.equals(event.getType())) { Message message = JsonUtils.toMessage(event.getContent()); MyUser myUser = mSession.getMyUser(); String pattern = null; if (BingRule.RULE_ID_CONTAIN_USER_NAME.equals(bingRule.ruleId)) { if (mMyUserId.indexOf(":") >= 0) { pattern = mMyUserId.substring(1, mMyUserId.indexOf(":")); } else { pattern = mMyUserId; } } else if (BingRule.RULE_ID_CONTAIN_DISPLAY_NAME.equals(bingRule.ruleId)) { pattern = myUser.displayname; if ((null != mSession.getDataHandler()) && (null != mSession.getDataHandler().getStore())) { Room room = mSession.getDataHandler().getStore().getRoom(event.roomId); if ((null != room) && (null != room.getLiveState())) { String disambiguousedName = room.getLiveState().getMemberName(mMyUserId); if (!TextUtils.equals(disambiguousedName, mMyUserId)) { pattern = Pattern.quote(disambiguousedName); } } } } if (!TextUtils.isEmpty(pattern)) { isFullfilled = caseInsensitiveFind(pattern, message.body); } } } else if (BingRule.RULE_ID_FALLBACK.equals(bingRule.ruleId)) { isFullfilled = true; } else { // some default rules define conditions // so use them instead of doing a custom treatment // RULE_ID_ONE_TO_ONE_ROOM // RULE_ID_SUPPRESS_BOTS_NOTIFICATIONS isFullfilled = eventMatchesConditions(event, bingRule.conditions); } if (isFullfilled) { return bingRule; } } } // no rules are fulfilled return null; } /** * Check if an event matches a conditions set * @param event the evnt to test * @param conditions the conditions set * @return true if the event matches all the conditions set. 
*/ private boolean eventMatchesConditions(Event event, List<Condition> conditions) { try { if ((conditions != null) && (event != null)) { for (Condition condition : conditions) { if (condition instanceof EventMatchCondition) { if (!((EventMatchCondition) condition).isSatisfied(event)) { return false; } } else if (condition instanceof ContainsDisplayNameCondition) { if (event.roomId != null) { Room room = mDataHandler.getRoom(event.roomId, false); // sanity checks if ((null != room) && (null != room.getMember(mMyUserId))) { // Best way to get your display name for now String myDisplayName = room.getMember(mMyUserId).displayname; if (!((ContainsDisplayNameCondition) condition).isSatisfied(event, myDisplayName)) { return false; } } } } else if (condition instanceof RoomMemberCountCondition) { if (event.roomId != null) { Room room = mDataHandler.getRoom(event.roomId, false); if (!((RoomMemberCountCondition) condition).isSatisfied(room)) { return false; } } } // FIXME: Handle device rules } } } catch (Exception e) { Log.e(LOG_TAG, "## eventMatchesConditions() failed " + e.getMessage()); return false; } return true; } /** * Build the internal build rules * @param bingRulesResponse the server request response. */ public void buildRules(BingRulesResponse bingRulesResponse) { if (null != bingRulesResponse) { updateRules(bingRulesResponse.global); onBingRulesUpdate(); } } /** * @return the rules set */ public BingRuleSet pushRules() { return mRulesSet; } /** * Update mRulesSet with the new one. * @param ruleSet the new ruleSet to apply */ private void updateRules(BingRuleSet ruleSet) { synchronized (this) { // clear the rules list // it is mRules.clear(); // sanity check if (null == ruleSet) { mRulesSet = new BingRuleSet(); return; } // Replace the list by ArrayList to be able to add/remove rules // Add the rule kind in each rule // Ensure that the null pointers are replaced by an empty list if (ruleSet.override != null) { ruleSet.override = new ArrayList<>(ruleSet.override); for (BingRule rule : ruleSet.override) { rule.kind = BingRule.KIND_OVERRIDE; } mRules.addAll(ruleSet.override); } else { ruleSet.override = new ArrayList<>(ruleSet.override); } if (ruleSet.content != null) { ruleSet.content = new ArrayList<>(ruleSet.content); for (BingRule rule : ruleSet.content) { rule.kind = BingRule.KIND_CONTENT; } addContentRules(ruleSet.content); } else { ruleSet.content = new ArrayList<>(); } mIsMentionOnlyMap.clear(); if (ruleSet.room != null) { ruleSet.room = new ArrayList<>(ruleSet.room); for (BingRule rule : ruleSet.room) { rule.kind = BingRule.KIND_ROOM; } addRoomRules(ruleSet.room); } else { ruleSet.room = new ArrayList<>(); } if (ruleSet.sender != null) { ruleSet.sender = new ArrayList<>(ruleSet.sender); for (BingRule rule : ruleSet.sender) { rule.kind = BingRule.KIND_SENDER; } addSenderRules(ruleSet.sender); } else { ruleSet.sender = new ArrayList<>(); } if (ruleSet.underride != null) { ruleSet.underride = new ArrayList<>(ruleSet.underride); for (BingRule rule : ruleSet.underride) { rule.kind = BingRule.KIND_UNDERRIDE; } mRules.addAll(ruleSet.underride); } else { ruleSet.underride = new ArrayList<>(); } mRulesSet = ruleSet; Log.d(LOG_TAG, "## updateRules() : has " + mRules.size() + " rules"); } } /** * Create a content EventMatchConditions list from a ContentRules list * @param rules the ContentRules list */ private void addContentRules(List<ContentRule> rules) { // sanity check if (null != rules) { for (ContentRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); 
condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "content.body"; condition.pattern = rule.pattern; rule.addCondition(condition); mRules.add(rule); } } } /** * Create a room EventMatchConditions list from a BingRule list * @param rules the BingRule list */ private void addRoomRules(List<BingRule> rules) { if (null != rules) { for (BingRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "room_id"; condition.pattern = rule.ruleId; rule.addCondition(condition); mRules.add(rule); } } } /** * Create a sender EventMatchConditions list from a BingRule list * @param rules the BingRule list */ private void addSenderRules(List<BingRule> rules) { if (null != rules) { for (BingRule rule : rules) { EventMatchCondition condition = new EventMatchCondition(); condition.kind = Condition.KIND_EVENT_MATCH; condition.key = "user_id"; condition.pattern = rule.ruleId; rule.addCondition(condition); mRules.add(rule); } } } /** * Toogle a rule. * @param rule the bing rule to toggle. * @param listener the rule update listener. * @return the matched bing rule or null it doesn't exist. */ public BingRule toggleRule(final BingRule rule, final onBingRuleUpdateListener listener) { if (null != rule) { updateEnableRuleStatus(rule.kind, rule.ruleId, !rule.isEnabled, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { rule.isEnabled = !rule.isEnabled; updateRules(mRulesSet); onBingRulesUpdate(); if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## toggleRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## onError : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. * @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. * @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } return rule; } /** * Delete the rule. * @param rule the rule to delete. * @param listener the rule update listener. */ public void deleteRule(final BingRule rule, final onBingRuleUpdateListener listener) { // null case if (null == rule) { if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } mApiClient.deleteRule(rule.kind, rule.ruleId, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { if (null != mRulesSet) { mRulesSet.remove(rule); updateRules(mRulesSet); onBingRulesUpdate(); } if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## onError : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. 
* @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. * @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } /** * Delete a rules list. * @param rules the rules to delete * @param listener the listener when the rules are deleted */ public void deleteRules(final List<BingRule> rules, final onBingRuleUpdateListener listener) { deleteRules(rules, 0, listener); } /** * Recursive rules deletion method. * @param rules the rules to delete * @param index the rule index * @param listener the listener when the rules are deleted */ private void deleteRules(final List<BingRule> rules, final int index, final onBingRuleUpdateListener listener) { // sanity checks if ((null == rules) || (index >= rules.size())) { onBingRulesUpdate(); if (null != listener) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRules() : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } // delete the rule deleteRule(rules.get(index), new onBingRuleUpdateListener() { @Override public void onBingRuleUpdateSuccess() { deleteRules(rules, index+1, listener); } @Override public void onBingRuleUpdateFailure(String errorMessage) { if (null != listener) { try { listener.onBingRuleUpdateFailure(errorMessage); } catch (Exception e) { Log.e(LOG_TAG, "## deleteRules() : onBingRuleUpdateFailure failed " + e.getMessage()); } } } }); } /** * Add a rule. * @param rule the rule to delete. * @param listener the rule update listener. */ public void addRule(final BingRule rule, final onBingRuleUpdateListener listener) { // null case if (null == rule) { if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } return; } mApiClient.addRule(rule, new SimpleApiCallback<Void>() { @Override public void onSuccess(Void info) { if (null != mRulesSet) { mRulesSet.addAtTop(rule); updateRules(mRulesSet); onBingRulesUpdate(); } if (listener != null) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } private void onError(String message) { if (null != listener) { try { listener.onBingRuleUpdateFailure(message); } catch (Exception e) { Log.e(LOG_TAG, "## addRule : onBingRuleUpdateFailure failed " + e.getMessage()); } } } /** * Called if there is a network error. * @param e the exception */ @Override public void onNetworkError(Exception e) { onError(e.getLocalizedMessage()); } /** * Called in case of a Matrix error. * @param e the Matrix error */ @Override public void onMatrixError(MatrixError e) { onError(e.getLocalizedMessage()); } /** * Called for some other type of error. 
* @param e the exception */ @Override public void onUnexpectedError(Exception e) { onError(e.getLocalizedMessage()); } }); } /** * Search the push rules for the room id * @param roomId the room id * @return the room rules list */ public List<BingRule> getPushRulesForRoomId(String roomId) { ArrayList<BingRule> rules = new ArrayList<>(); // sanity checks if (!TextUtils.isEmpty(roomId) && (null != mRulesSet)) { // the webclient defines two ways to set a room rule // mention only : the user won't have any push for the room except if a content rule is fullfilled // mute : no notification for this room // mute rules are defined in override groups if (null != mRulesSet.override) { for (BingRule roomRule : mRulesSet.override) { if (TextUtils.equals(roomRule.ruleId, roomId)) { rules.add(roomRule); } } } // mention only are defined in room group if (null != mRulesSet.room) { for (BingRule roomRule : mRulesSet.room) { if (TextUtils.equals(roomRule.ruleId, roomId)) { rules.add(roomRule); } } } } return rules; } /** * Tell whether the regular notifications are disabled for the room. * @param roomId the room id * @return true if the regular notifications are disabled (mention only) */ public boolean isRoomMentionOnly(String roomId) { // sanity check if (!TextUtils.isEmpty(roomId)) { if (mIsMentionOnlyMap.containsKey(roomId)) { return mIsMentionOnlyMap.get(roomId); } if (null != mRulesSet.room) { for (BingRule roomRule : mRulesSet.room) { if (TextUtils.equals(roomRule.ruleId, roomId)) { List<BingRule> roomRules = getPushRulesForRoomId(roomId); if (0 != roomRules.size()) { for (BingRule rule : roomRules) { if (rule.shouldNotNotify()) { mIsMentionOnlyMap.put(roomId, rule.isEnabled); return rule.isEnabled; } } } } } } mIsMentionOnlyMap.put(roomId, false); } return false; } /** * Test if the room has a dedicated rule which disables notification. * @param roomId the roomId * @return true if there is a rule to disable notifications. */ public boolean isRoomNotificationsDisabled(String roomId) { List<BingRule> roomRules = getPushRulesForRoomId(roomId); if (0 != roomRules.size()) { for(BingRule rule : roomRules) { if (!rule.shouldNotify() && rule.isEnabled) { return true; } } } return false; } /** * Mute / unmute the room notifications. * Only the room rules are checked. * * @param roomId the room id to mute / unmute. * @param isMuted set to true to mute the notification * @param listener the listener. */ public void muteRoomNotifications(final String roomId, final boolean isMuted, final onBingRuleUpdateListener listener) { List<BingRule> bingRules = getPushRulesForRoomId(roomId); // the mobile client only supports to define a "mention only" rule i.e a rule defined in the room rules set. // delete the rule and create a new one deleteRules(bingRules, new onBingRuleUpdateListener() { @Override public void onBingRuleUpdateSuccess() { if (isMuted) { addRule(new BingRule(BingRule.KIND_ROOM, roomId, false, false, false), listener); } else if (null != listener) { try { listener.onBingRuleUpdateSuccess(); } catch (Exception e) { Log.e(LOG_TAG, "## muteRoomNotifications() : onBingRuleUpdateSuccess failed " + e.getMessage()); } } } @Override public void onBingRuleUpdateFailure(String errorMessage) { if (null != listener) { try { listener.onBingRuleUpdateFailure(errorMessage); } catch (Exception e) { Log.e(LOG_TAG, "## muteRoomNotifications() : onBingRuleUpdateFailure failed " + e.getMessage()); } } } }); } }
Add BingRulesManager.fulfilledHighlightBingRule to check only the highlighted rules
matrix-sdk/src/main/java/org/matrix/androidsdk/util/BingRulesManager.java
Add BingRulesManager.fulfilledHighlightBingRule to check only the highlighted rules
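deleteRules in the manager above walks its list recursively, deleting one rule per request and issuing the next delete only from the previous request's success callback, so requests never overlap. The sketch below shows that sequential async pattern on its own; the AsyncStore and Listener types are hypothetical stand-ins for the SDK's REST client and update listener, and the toy store is synchronous only so the example runs standalone.

import java.util.Arrays;
import java.util.List;

public class SequentialDeleteSketch {

    interface Listener {
        void onSuccess();
        void onFailure(String error);
    }

    // hypothetical asynchronous backend: deletes one item, then reports back
    interface AsyncStore {
        void delete(String id, Listener listener);
    }

    // delete ids[index..] one at a time; the next delete starts from the previous success callback
    static void deleteAll(AsyncStore store, List<String> ids, int index, Listener done) {
        if (ids == null || index >= ids.size()) {
            done.onSuccess(); // nothing left: report overall success
            return;
        }
        store.delete(ids.get(index), new Listener() {
            @Override public void onSuccess() { deleteAll(store, ids, index + 1, done); }
            @Override public void onFailure(String error) { done.onFailure(error); }
        });
    }

    public static void main(String[] args) {
        // toy synchronous store so the sketch runs on its own
        AsyncStore store = (id, l) -> { System.out.println("deleted " + id); l.onSuccess(); };
        deleteAll(store, Arrays.asList("rule-a", "rule-b"), 0, new Listener() {
            @Override public void onSuccess() { System.out.println("all done"); }
            @Override public void onFailure(String error) { System.out.println("failed: " + error); }
        });
    }
}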
Java
apache-2.0
9efa7a931c2e85f6731c1d4f1908e76d134c24d4
0
endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS
package org.endeavourhealth.queuereader; import OpenPseudonymiser.Crypto; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.apache.commons.csv.*; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.endeavourhealth.common.cache.ObjectMapperPool; import org.endeavourhealth.common.config.ConfigManager; import org.endeavourhealth.common.fhir.*; import org.endeavourhealth.common.utility.FileHelper; import org.endeavourhealth.common.utility.SlackHelper; import org.endeavourhealth.core.configuration.ConfigDeserialiser; import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig; import org.endeavourhealth.core.configuration.QueueReaderConfiguration; import org.endeavourhealth.core.csv.CsvHelper; import org.endeavourhealth.core.database.dal.DalProvider; import org.endeavourhealth.core.database.dal.admin.ServiceDalI; import org.endeavourhealth.core.database.dal.admin.models.Service; import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI; import org.endeavourhealth.core.database.dal.audit.ExchangeDalI; import org.endeavourhealth.core.database.dal.audit.models.*; import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI; import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI; import org.endeavourhealth.core.database.dal.ehr.ResourceDalI; import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper; import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMapping; import org.endeavourhealth.core.database.dal.reference.PostcodeDalI; import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup; import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseAgeUpdaterlDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseIdDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.models.EnterpriseAge; import org.endeavourhealth.core.database.rdbms.ConnectionManager; import org.endeavourhealth.core.exceptions.TransformException; import org.endeavourhealth.core.fhirStorage.FhirSerializationHelper; import org.endeavourhealth.core.fhirStorage.FhirStorageService; import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint; import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange; import org.endeavourhealth.core.queueing.QueueHelper; import org.endeavourhealth.core.xml.TransformErrorSerializer; import org.endeavourhealth.core.xml.transformError.TransformError; import org.endeavourhealth.subscriber.filer.EnterpriseFiler; import org.endeavourhealth.transform.barts.transforms.PPADDTransformer; import org.endeavourhealth.transform.barts.transforms.PPNAMTransformer; import org.endeavourhealth.transform.barts.transforms.PPPHOTransformer; import org.endeavourhealth.transform.common.*; import org.endeavourhealth.transform.common.resourceBuilders.PatientBuilder; import 
org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer; import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper; import org.endeavourhealth.transform.enterprise.json.LinkDistributorConfig; import org.endeavourhealth.transform.enterprise.transforms.PatientTransformer; import org.hibernate.internal.SessionImpl; import org.hl7.fhir.instance.model.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import java.io.*; import java.lang.reflect.Constructor; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.sql.*; import java.text.SimpleDateFormat; import java.util.*; import java.util.Date; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; public class Main { private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static void main(String[] args) throws Exception { String configId = args[0]; LOG.info("Initialising config manager"); ConfigManager.initialize("queuereader", configId); /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEncounters")) { String table = args[1]; fixEncounters(table); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateHomertonSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateAdastraSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateVisionSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTppSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createTppSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsSubset")) { String sourceDirPath = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String samplePatientsFile = args[4]; createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsOrgs")) { String serviceId = args[1]; fixBartsOrgs(serviceId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestPreparedStatements")) { String url = args[1]; String user = args[2]; String pass = args[3]; String serviceId = args[4]; testPreparedStatements(url, user, pass, serviceId); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTransformMap")) { UUID serviceId = UUID.fromString(args[1]); String table = args[2]; String dstFile = args[3]; createTransforMap(serviceId, table, dstFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ExportFhirToCsv")) { UUID serviceId = UUID.fromString(args[1]); String path = args[2]; exportFhirToCsv(serviceId, path); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("TestBatchInserts")) { String url = args[1]; String user = 
args[2]; String pass = args[3]; String num = args[4]; String batchSize = args[5]; testBatchInserts(url, user, pass, num, batchSize); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) { applyEmisAdminCaches(); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSubscribers")) { fixSubscriberDbs(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Read")) { String s3Bucket = args[1]; String s3Key = args[2]; String start = args[3]; String len = args[4]; testS3Read(s3Bucket, s3Key, start, len); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) { String publisherId = args[1]; String systemId = args[2]; fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CheckDeletedObs")) { String serviceId = args[1]; String systemId = args[2]; checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) { fixPersonsNoNhsNumber(); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) { String subscriberConfigName = args[1]; populateSubscriberUprnTable(subscriberConfigName); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PostToRabbit")) { String exchangeName = args[1]; String srcFile = args[2]; postToRabbit(exchangeName, srcFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToProtocol")) { String srcFile = args[1]; postToProtocol(srcFile); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsPatients")) { UUID serviceId = UUID.fromString(args[1]); fixBartsPatients(serviceId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixDeceasedPatients")) { String subscriberConfig = args[1]; fixDeceasedPatients(subscriberConfig); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPseudoIds")) { String subscriberConfig = args[1]; int threads = Integer.parseInt(args[2]); fixPseudoIds(subscriberConfig, threads); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertExchangeBody")) { String systemId = args[1]; convertExchangeBody(UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixReferrals")) { fixReferralRequests(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateNewSearchTable")) { String table = args[1]; populateNewSearchTable(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsEscapes")) { String filePath = args[1]; fixBartsEscapedFiles(filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; String systemId = args[2]; String filePath = args[3]; postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath); System.exit(0); }*/ if (args.length >= 1 && 
args[0].equalsIgnoreCase("FixDisabledExtract")) { String sharedStoragePath = args[1]; String tempDir = args[2]; String systemId = args[3]; String serviceOdsCode = args[4]; fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestSlack")) { testSlack(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; boolean all = Boolean.parseBoolean(args[2]); postToInbound(UUID.fromString(serviceId), all); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixPatientSearch")) { String serviceId = args[1]; String systemId = null; if (args.length > 2) { systemId = args[2]; } if (serviceId.equalsIgnoreCase("All")) { fixPatientSearchAllServices(systemId); } else { fixPatientSearch(serviceId, systemId); } System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixSlotReferences")) { String serviceId = args[1]; try { UUID serviceUuid = UUID.fromString(serviceId); fixSlotReferences(serviceUuid); } catch (Exception ex) { fixSlotReferencesForPublisher(serviceId); } System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3VsMySQL")) { UUID serviceUuid = UUID.fromString(args[1]); int count = Integer.parseInt(args[2]); int sqlBatchSize = Integer.parseInt(args[3]); String bucketName = args[4]; testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("Exit")) { String exitCode = args[1]; LOG.info("Exiting with error code " + exitCode); int exitCodeInt = Integer.parseInt(exitCode); System.exit(exitCodeInt); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunSql")) { String host = args[1]; String username = args[2]; String password = args[3]; String sqlFile = args[4]; runSql(host, username, password, sqlFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateProtocolQueue")) { String serviceId = null; if (args.length > 1) { serviceId = args[1]; } String startingExchangeId = null; if (args.length > 2) { startingExchangeId = args[2]; } populateProtocolQueue(serviceId, startingExchangeId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEncounterTerms")) { String path = args[1]; String outputPath = args[2]; findEncounterTerms(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEmisStartDates")) { String path = args[1]; String outputPath = args[2]; findEmisStartDates(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportHl7Encounters")) { String sourceCsvPpath = args[1]; String outputPath = args[2]; exportHl7Encounters(sourceCsvPpath, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixExchangeBatches")) { fixExchangeBatches(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindCodes")) { findCodes(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindDeletedOrgs")) { findDeletedOrgs(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("LoadBartsData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String onlyThisFileType = null; if (args.length > 6) { onlyThisFileType = args[6]; } loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType); System.exit(0); } if (args.length 
>= 1 && args[0].equalsIgnoreCase("CreateBartsDataTables")) {
            createBartsDataTables();
            System.exit(0);
        }

        if (args.length != 1) {
            LOG.error("Usage: queuereader config_id");
            return;
        }

        LOG.info("--------------------------------------------------");
        LOG.info("EDS Queue Reader " + configId);
        LOG.info("--------------------------------------------------");

        LOG.info("Fetching queuereader configuration");
        String configXml = ConfigManager.getConfiguration(configId);
        QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);

        /*LOG.info("Registering shutdown hook");
        registerShutdownHook();*/

        // Instantiate rabbit handler
        LOG.info("Creating EDS queue reader");
        RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);

        // Begin consume
        rabbitHandler.start();

        LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
    }

    private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) {
        LOG.debug("Testing S3 vs MySQL for service " + serviceUuid);
        try {
            //retrieve some audit JSON from the DB
            EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
            SessionImpl session = (SessionImpl) entityManager.getDelegate();
            Connection connection = session.connection();

            String sql = "select resource_id, resource_type, version, mappings_json"
                    + " from resource_field_mappings"
                    + " where mappings_json != '[]'";
            if (count > -1) {
                //note the leading space, so the limit clause doesn't run straight on from the where clause
                sql += " limit " + count + ";";
            }

            Statement statement = connection.createStatement();
            statement.setFetchSize(1000);
            ResultSet rs = statement.executeQuery(sql);

            List<ResourceFieldMapping> list = new ArrayList<>();

            while (rs.next()) {
                int col = 1;
                String resourceId = rs.getString(col++);
                String resourceType = rs.getString(col++);
                String version = rs.getString(col++);
                String json = rs.getString(col++);

                ResourceFieldMapping obj = new ResourceFieldMapping();
                obj.setResourceId(UUID.fromString(resourceId));
                obj.setResourceType(resourceType);
                obj.setVersion(UUID.fromString(version));
                obj.setResourceField(json);

                list.add(obj);
            }

            rs.close();
            statement.close();
            entityManager.close();

            int done = 0;

            //test writing to S3
            long s3Start = System.currentTimeMillis();
            LOG.debug("Doing S3 test");

            for (int i=0; i<list.size(); i++) {
                ResourceFieldMapping mapping = list.get(i);

                String entryName = mapping.getVersion().toString() + ".json";
                String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip";

                String jsonStr = mapping.getResourceField();

                //may as well zip the data, since it will compress well
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ZipOutputStream zos = new ZipOutputStream(baos);
                zos.putNextEntry(new ZipEntry(entryName));
                zos.write(jsonStr.getBytes());
                zos.flush();
                zos.close();

                byte[] bytes = baos.toByteArray();
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);

                //ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
                DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();

                AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
                        .standard()
                        .withCredentials(credentialsProvider)
                        .withRegion(Regions.EU_WEST_2);
                AmazonS3 s3Client = clientBuilder.build();

                ObjectMetadata objectMetadata = new ObjectMetadata();
                objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
                objectMetadata.setContentLength(bytes.length);
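                // The next step uploads the zipped JSON to S3 with AES-256 server-side encryption, using the
                // auditTest/<service>/<resourceType>/<resourceId>/<version>.zip key built above.
                // Observation only (not a behaviour change): a new AmazonS3 client is built for every record,
                // so client construction time is included in the S3 timing. A minimal sketch, if that overhead
                // should be excluded from the benchmark, would be to build one client before the loop and reuse it:
                //
                //     AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
                //             .withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
                //             .withRegion(Regions.EU_WEST_2)
                //             .build();
                //
                // and then pass each PutObjectRequest below to that shared client.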
                PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata);
                s3Client.putObject(putRequest);

                done ++;
                if (done % 1000 == 0) {
                    LOG.debug("Done " + done + " / " + list.size());
                }
            }

            long s3End = System.currentTimeMillis();
            LOG.debug("S3 took " + (s3End - s3Start) + " ms");

            //test inserting into a DB
            long sqlStart = System.currentTimeMillis();
            LOG.debug("Doing SQL test");

            sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)";

            entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
            session = (SessionImpl) entityManager.getDelegate();
            connection = session.connection();
            PreparedStatement ps = connection.prepareStatement(sql);
            entityManager.getTransaction().begin();

            done = 0;
            int currentBatchSize = 0;

            for (int i=0; i<list.size(); i++) {
                ResourceFieldMapping mapping = list.get(i);

                int col = 1;
                ps.setString(col++, mapping.getResourceId().toString());
                ps.setString(col++, mapping.getResourceType());
                ps.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
                ps.setString(col++, mapping.getVersion().toString());
                ps.setString(col++, mapping.getResourceField());

                ps.addBatch();
                currentBatchSize ++;

                if (currentBatchSize >= sqlBatchSize || i+1 == list.size()) {
                    ps.executeBatch();
                    entityManager.getTransaction().commit();

                    //reset the counter, otherwise every row after the first full batch triggers an executeBatch
                    currentBatchSize = 0;

                    //mirror what would happen normally
                    ps.close();
                    entityManager.close();

                    if (i+1 < list.size()) {
                        entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
                        session = (SessionImpl) entityManager.getDelegate();
                        connection = session.connection();
                        ps = connection.prepareStatement(sql);
                        entityManager.getTransaction().begin();
                    }
                }

                done ++;
                if (done % 1000 == 0) {
                    LOG.debug("Done " + done + " / " + list.size());
                }
            }

            long sqlEnd = System.currentTimeMillis();
            LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms");

            LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid);

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static void createBartsDataTables() {
        LOG.debug("Creating Barts data tables");
        try {
            List<String> fileTypes = new ArrayList<>();
            fileTypes.add("AEATT");
            fileTypes.add("Birth");
            //fileTypes.add("BulkDiagnosis");
            //fileTypes.add("BulkProblem");
            //fileTypes.add("BulkProcedure");
            fileTypes.add("CLEVE");
            fileTypes.add("CVREF");
            fileTypes.add("Diagnosis");
            fileTypes.add("ENCINF");
            fileTypes.add("ENCNT");
            fileTypes.add("FamilyHistory");
            fileTypes.add("IPEPI");
            fileTypes.add("IPWDS");
            fileTypes.add("LOREF");
            fileTypes.add("NOMREF");
            fileTypes.add("OPATT");
            fileTypes.add("ORGREF");
            fileTypes.add("PPADD");
            fileTypes.add("PPAGP");
            fileTypes.add("PPALI");
            fileTypes.add("PPINF");
            fileTypes.add("PPNAM");
            fileTypes.add("PPPHO");
            fileTypes.add("PPREL");
            fileTypes.add("Pregnancy");
            fileTypes.add("Problem");
            fileTypes.add("PROCE");
            fileTypes.add("Procedure");
            fileTypes.add("PRSNLREF");
            fileTypes.add("SusEmergency");
            fileTypes.add("SusInpatient");
            fileTypes.add("SusOutpatient");
            //fileTypes.add("Tails"); TODO - have three separate tails files
            fileTypes.add("EventCode");
            fileTypes.add("EventSetCanon");
            fileTypes.add("EventSet");
            fileTypes.add("EventSetExplode");
            fileTypes.add("BlobContent");
            fileTypes.add("SusInpatientTail");
            fileTypes.add("SusOutpatientTail");
            fileTypes.add("SusEmergencyTail");
            fileTypes.add("AEINV");
            fileTypes.add("AETRE");
            fileTypes.add("OPREF");
            fileTypes.add("STATREF");
            fileTypes.add("RTTPE");
            fileTypes.add("PPATH");
            fileTypes.add("DOCRP");
            fileTypes.add("SCHAC");
            fileTypes.add("EALEN");
fileTypes.add("DELIV"); fileTypes.add("EALOF"); fileTypes.add("SusEmergencyCareDataSet"); fileTypes.add("SusEmergencyCareDataSetTail"); for (String fileType: fileTypes) { createBartsDataTable(fileType); } LOG.debug("Finished Creating Barts data tables"); } catch (Throwable t) { LOG.error("", t); } } private static void createBartsDataTable(String fileType) throws Exception { ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, null); } catch (ClassNotFoundException cnfe) { System.out.println("-- No parser for file type [" + fileType + "]"); return; } System.out.println("-- " + fileType); String table = fileType.replace(" ", "_"); String dropSql = "DROP TABLE IF EXISTS `" + table + "`;"; System.out.println(dropSql); String sql = "CREATE TABLE `" + table + "` ("; sql += "file_name varchar(100)"; if (parser instanceof AbstractFixedParser) { AbstractFixedParser fixedParser = (AbstractFixedParser)parser; List<FixedParserField> fields = fixedParser.getFieldList(); for (FixedParserField field: fields) { String col = field.getName(); int len = field.getFieldlength(); sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); sql += " varchar("; sql += len; sql += ")"; } } else { List<String> cols = parser.getColumnHeaders(); for (String col: cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); if (col.equals("BLOB_CONTENTS") || col.equals("VALUE_LONG_TXT") || col.equals("COMMENT_TXT") || col.equals("NONPREG_REL_PROBLM_SCT_CD")) { sql += " mediumtext"; } else if (col.indexOf("Date") > -1 || col.indexOf("Time") > -1) { sql += " varchar(10)"; } else { sql += " varchar(255)"; } } } sql += ");"; /*LOG.debug("-- fileType"); LOG.debug(sql);*/ System.out.println(sql); } private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) { LOG.debug("Loading Barts data from into " + dbUrl); try { //hash file type of every file ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); Date startDate = sdf.parse("2018-11-01"); //Date startDate = sdf.parse("2018-09-17"); //Date endDate = sdf.parse("2018-09-30"); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (files.isEmpty()) { continue; } for (ExchangePayloadFile file: files) { String type = file.getType(); String path = file.getPath(); //if only doing a specific file type, skip all others if (onlyThisFileType != null && !type.equals(onlyThisFileType)) { continue; } boolean processFile = false; if (type.equalsIgnoreCase("CVREF") || type.equalsIgnoreCase("LOREF") || type.equalsIgnoreCase("ORGREF") || type.equalsIgnoreCase("PRSNLREF") || type.equalsIgnoreCase("NOMREF")) { processFile = true; } else { File f = 
new File(path); File parentFile = f.getParentFile(); String parentDir = parentFile.getName(); Date extractDate = sdf.parse(parentDir); if (!extractDate.before(startDate)) { processFile = true; } /*if (!extractDate.before(startDate) && !extractDate.after(endDate)) { processFile = true; }*/ } if (processFile) { loadBartsDataFromFile(conn, path, type); } } } conn.close(); LOG.debug("Finished Loading Barts data from into " + dbUrl); } catch (Throwable t) { LOG.error("", t); } } private static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception { LOG.debug("Loading " + fileType + ": " + filePath); String fileName = FilenameUtils.getName(filePath); ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, filePath); } catch (ClassNotFoundException cnfe) { LOG.error("No parser for file type [" + fileType + "]"); return; } String table = fileType.replace(" ", "_"); //check table is there String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1"; Statement statement = conn.createStatement(); ResultSet rs = statement.executeQuery(sql); boolean tableExists = rs.next(); rs.close(); statement.close(); if (!tableExists) { LOG.error("No table exists for " + table); return; } //create insert statement sql = "INSERT INTO `" + table + "` ("; sql += "file_name"; List<String> cols = parser.getColumnHeaders(); for (String col: cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); } sql += ") VALUES ("; sql += "?"; for (String col: cols) { sql += ", "; sql += "?"; } sql += ")"; PreparedStatement ps = conn.prepareStatement(sql); List<String> currentBatchStrs = new ArrayList<>(); //load table try { int done = 0; int currentBatchSize = 0; while (parser.nextRecord()) { int col = 1; //file name is always first ps.setString(col++, fileName); for (String colName : cols) { CsvCell cell = parser.getCell(colName); if (cell == null) { ps.setNull(col++, Types.VARCHAR); } else { ps.setString(col++, cell.getString()); } } ps.addBatch(); currentBatchSize++; currentBatchStrs.add((ps.toString())); //for error handling if (currentBatchSize >= 5) { ps.executeBatch(); currentBatchSize = 0; currentBatchStrs.clear(); } done++; if (done % 5000 == 0) { LOG.debug("Done " + done); } } if (currentBatchSize >= 0) { ps.executeBatch(); } ps.close(); } catch (Throwable t) { LOG.error("Failed on batch with statements:"); for (String currentBatchStr: currentBatchStrs) { LOG.error(currentBatchStr); } throw t; } LOG.debug("Finished " + fileType + ": " + filePath); } private static void fixPseudoIds(String subscriberConfig, int threads) { LOG.debug("Fixing Pseudo IDs for " + subscriberConfig); try { //update psuedo ID on patient table //update psuedo ID on person table //update pseudo ID on subscriber_transform mapping table JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = 
ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } Connection subscriberConnection = EnterpriseFiler.openConnection(config); List<Long> patientIds = new ArrayList<>(); Map<Long, Long> hmOrgIds = new HashMap<>(); Map<Long, Long> hmPersonIds = new HashMap<>(); String sql = "SELECT id, organization_id, person_id FROM patient"; Statement statement = subscriberConnection.createStatement(); statement.setFetchSize(10000); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); long personId = rs.getLong(3); patientIds.add(new Long(patientId)); hmOrgIds.put(new Long(patientId), new Long(orgId)); hmPersonIds.put(new Long(patientId), new Long(personId)); } rs.close(); subscriberConnection.close(); LOG.debug("Found " + patientIds.size() + " patients"); AtomicInteger done = new AtomicInteger(); int pos = 0; List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<Long> patientSubset = new ArrayList<>(); int count = patientIds.size() / threads; if (i+1 == threads) { count = patientIds.size() - pos; } for (int j=0; j<count; j++) { Long patientId = patientIds.get(pos); patientSubset.add(patientId); pos ++; } FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done); Thread t = new Thread(runnable); t.start(); threadList.add(t); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } static class FixPseudoIdRunnable implements Runnable { private String subscriberConfig = null; private List<Long> patientIds = null; private Map<Long, Long> hmOrgIds = null; private Map<Long, Long> hmPersonIds = null; private AtomicInteger done = null; public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) { this.subscriberConfig = subscriberConfig; this.patientIds = patientIds; this.hmOrgIds = hmOrgIds; this.hmPersonIds = hmPersonIds; this.done = done; } @Override public void run() { try { doRun(); } catch (Throwable t) { LOG.error("", t); } } private void doRun() throws Exception { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Statement statement = subscriberConnection.createStatement(); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), 
Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } //PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); Statement subscriberTransformStatement = subscriberTransformConnection.createStatement(); String sql = null; ResultSet rs = null; for (Long patientId: patientIds) { Long orgId = hmOrgIds.get(patientId); Long personId = hmPersonIds.get(patientId); //find service ID sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); //find patient ID sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); if (!resourceType.equals("Patient")) { throw new Exception("Not a patient resource type for enterprise ID " + patientId); } //get patient Resource resource = null; try { resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId); } catch (Exception ex) { throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex); } if (resource == null) { LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; //generate new pseudo ID String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt); //save to person if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE person" + " SET pseudo_id = null" + " WHERE id = " + personId; statement.executeUpdate(sql); } else { sql = "UPDATE person" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + personId; statement.executeUpdate(sql); } //save to patient if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE patient" + " SET pseudo_id = null" + " WHERE id = " + patientId; statement.executeUpdate(sql); } else { sql = "UPDATE patient" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + patientId; statement.executeUpdate(sql); } //linked distributers if (arr != null) { for (LinkDistributorConfig linked: arr) { String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked); sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')" + " ON DUPLICATE KEY UPDATE" + " target_salt_key_name = VALUES(target_salt_key_name)," + " target_skid = VALUES(target_skid)"; statement.executeUpdate(sql); } } //save to subscriber transform sql = "DELETE FROM pseudo_id_map WHERE 
patient_id = '" + resourceId + "'"; subscriberTransformStatement.executeUpdate(sql); if (!Strings.isNullOrEmpty(pseudoId)) { sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')"; subscriberTransformStatement.executeUpdate(sql); } subscriberConnection.commit(); subscriberTransformConnection.commit(); int doneLocal = done.incrementAndGet(); if (doneLocal % 1000 == 0) { LOG.debug("Done " + doneLocal); } } statement.close(); subscriberTransformStatement.close(); subscriberConnection.close(); subscriberTransformConnection.close(); } } private static void fixDeceasedPatients(String subscriberConfig) { LOG.debug("Fixing Deceased Patients for " + subscriberConfig); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Map<Long, Long> patientIds = new HashMap<>(); String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL"; Statement statement = subscriberConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); patientIds.put(new Long(patientId), new Long(orgId)); } rs.close(); statement.close(); EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); for (Long patientId: patientIds.keySet()) { Long orgId = patientIds.get(patientId); statement = subscriberTransformConnection.createStatement(); sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); statement.close(); Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId); if (resource == null) { LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; Date dob = patient.getBirthDate(); Date dod = patient.getDeceasedDateTimeType().getValue(); Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod); updateEnterprisePatient(patientId, ages, subscriberConnection); updateEnterprisePerson(patientId, ages, subscriberConnection); } subscriberConnection.close(); subscriberTransformConnection.close(); LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } private static void updateEnterprisePatient(long enterprisePatientId, 
Integer[] ages, Connection connection) throws Exception { //the enterprise patient database isn't managed using hibernate, so we need to simply write a simple update statement StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient SET "); sb.append("age_years = ?, "); sb.append("age_months = ?, "); sb.append("age_weeks = ? "); sb.append("WHERE id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks"); } private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //update the age fields on the person table where the person is for our patient and their pseudo IDs match StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient, person SET "); sb.append("person.age_years = ?, "); sb.append("person.age_months = ?, "); sb.append("person.age_weeks = ? "); sb.append("WHERE patient.id = ? "); sb.append("AND patient.person_id = person.id "); sb.append("AND patient.pseudo_id = person.pseudo_id"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); } private static void testS3Read(String s3BucketName, String keyName, String start, String len) { LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); try { AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(DefaultAWSCredentialsProviderChain.getInstance()) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName); long startInt = Long.parseLong(start); long lenInt = Long.parseLong(len); long endInt = startInt + lenInt; request.setRange(startInt, endInt); long startMs = System.currentTimeMillis(); S3Object object = s3Client.getObject(request); InputStream inputStream = object.getObjectContent(); InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset()); StringBuilder sb = new StringBuilder(); char[] buf = new char[100]; while (true) { int read = reader.read(buf); if (read == -1 || sb.length() >= lenInt) { break; } sb.append(buf, 0, read); } reader.close(); long endMs = System.currentTimeMillis(); LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms"); LOG.debug("Finished Testing S3 
Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); } catch (Throwable t) { LOG.error("", t); } } private static void createTransforMap(UUID serviceId, String table, String outputFile) { LOG.debug("Creating transform map for " + serviceId + " from " + table); try { //retrieve from table EntityManager transformEntityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session2 = (SessionImpl)transformEntityManager.getDelegate(); Connection mappingConnection = session2.connection(); EntityManager ehrEntityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session3 = (SessionImpl)ehrEntityManager.getDelegate(); Connection ehrConnection = session3.connection(); String sql = "SELECT resource_type, resource_id, version FROM " + table; Statement statement = mappingConnection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); LOG.debug("Got resource IDs from DB"); Map<String, Map<String, List<String>>> hm = new HashMap<>(); int count = 0; //build map up per resource while (rs.next()) { String resourceType = rs.getString("resource_type"); String resourceId = rs.getString("resource_id"); String resourceVersion = rs.getString("version"); /*sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*/ /*sql = "SELECT * FROM resource_field_mappings WHERE version = ?"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*/ sql = "SELECT * FROM resource_field_mappings WHERE resource_type = '" + resourceType + "' AND resource_id = '" + resourceId + "' AND version = '" + resourceVersion + "';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql); //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //statement1.setString(1, resourceVersion); /*statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion);*/ ResultSet rs1 = null; try { rs1 = statement1.executeQuery(sql); } catch (Exception ex) { LOG.error("" + statement1); throw ex; } rs1.next(); String jsonStr = rs1.getString("mappings_json"); rs1.close(); statement1.close(); sql = "SELECT * FROM resource_history WHERE resource_type = ? AND resource_id = ? 
AND version = ?"; statement1 = ehrConnection.prepareStatement(sql); statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion); rs1 = statement1.executeQuery(); if (!rs1.next()) { throw new Exception("Failed to find resource_history for " + statement1.toString()); } String s = rs1.getString("resource_data"); rs1.close(); statement1.close(); if (Strings.isNullOrEmpty(s)) { continue; } JsonNode resourceJson = ObjectMapperPool.getInstance().readTree(s); Map<String, List<String>> hmResourceType = hm.get(resourceType); if (hmResourceType == null) { hmResourceType = new HashMap<>(); hm.put(resourceType, hmResourceType); } JsonNode json = ObjectMapperPool.getInstance().readTree(jsonStr); for (int i=0; i<json.size(); i++) { JsonNode child = json.get(i); JsonNode idNode = child.get("auditId"); JsonNode colsNode = child.get("cols"); if (idNode == null) { throw new Exception("No ID node in " + jsonStr); } if (colsNode == null) { throw new Exception("No cols node in " + jsonStr); } long id = idNode.asLong(); //get source file ID sql = "SELECT * FROM source_file_record WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, id); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileId = rs1.getLong("source_file_id"); rs1.close(); statement1.close(); //get source file type sql = "SELECT * FROM source_file WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileId); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileType = rs1.getLong("source_file_type_id"); rs1.close(); statement1.close(); //get the type desc sql = "SELECT * FROM source_file_type WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); rs1.next(); String fileTypeDesc = rs1.getString("description"); rs1.close(); statement1.close(); //get the cols Map<Integer, String> hmCols = new HashMap<>(); sql = "SELECT * FROM source_file_type_column WHERE source_file_type_id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); while (rs1.next()) { int index = rs1.getInt("column_index"); String name = rs1.getString("column_name"); hmCols.put(new Integer(index), name); } rs1.close(); statement1.close(); for (int j=0; j<colsNode.size(); j++) { JsonNode colNode = colsNode.get(j); int col = colNode.get("col").asInt(); String jsonField = colNode.get("field").asText(); int index = jsonField.indexOf("["); while (index > -1) { int endIndex = jsonField.indexOf("]", index); String prefix = jsonField.substring(0, index + 1); String suffix = jsonField.substring(endIndex); if (prefix.equals("extension[")) { String val = jsonField.substring(index+1, endIndex); int extensionIndex = Integer.parseInt(val); JsonNode extensionArray = resourceJson.get("extension"); JsonNode extensionRoot = extensionArray.get(extensionIndex); String extensionUrl = extensionRoot.get("url").asText(); extensionUrl = extensionUrl.replace("http://endeavourhealth.org/fhir/StructureDefinition/", ""); extensionUrl = extensionUrl.replace("http://hl7.org/fhir/StructureDefinition/", ""); jsonField = prefix + extensionUrl + suffix; } else { jsonField = prefix + "n" + suffix; } index = jsonField.indexOf("[", endIndex); } String colName = hmCols.get(new Integer(col)); String fileTypeAndCol = fileTypeDesc + ":" + colName; List<String> fieldNameMappings = hmResourceType.get(jsonField); if 
(fieldNameMappings == null) { fieldNameMappings = new ArrayList<>(); hmResourceType.put(jsonField, fieldNameMappings); } if (!fieldNameMappings.contains(fileTypeAndCol)) { fieldNameMappings.add(fileTypeAndCol); } } } count ++; if (count % 500 == 0) { LOG.debug("Done " + count); } } LOG.debug("Done " + count); rs.close(); ehrEntityManager.close(); //create output file List<String> lines = new ArrayList<>(); List<String> resourceTypes = new ArrayList<>(hm.keySet()); Collections.sort(resourceTypes, String.CASE_INSENSITIVE_ORDER); for (String resourceType: resourceTypes) { lines.add("============================================================"); lines.add(resourceType); lines.add("============================================================"); Map<String, List<String>> hmResourceType = hm.get(resourceType); List<String> fields = new ArrayList<>(hmResourceType.keySet()); Collections.sort(fields, String.CASE_INSENSITIVE_ORDER); for (String field: fields) { String linePrefix = field + " = "; List<String> sourceRecords = hmResourceType.get(field); for (String sourceRecord: sourceRecords) { lines.add(linePrefix + sourceRecord); linePrefix = Strings.repeat(" ", linePrefix.length()); } lines.add(""); } lines.add(""); } File f = new File(outputFile); Path p = f.toPath(); Files.write(p, lines, Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); LOG.debug("Finished creating transform map from " + table); } catch (Throwable t) { LOG.error("", t); } } private static void fixBartsPatients(UUID serviceId) { LOG.debug("Fixing Barts patients at service " + serviceId); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); int checked = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { String patientId = rs.getString(1); ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId)); if (wrapper == null) { LOG.error("Failed to get recource current for ID " + patientId); continue; } String oldJson = wrapper.getResourceData(); Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson); PatientBuilder patientBuilder = new PatientBuilder(patient); List<String> numbersFromCsv = new ArrayList<>(); if (patient.hasTelecom()) { for (ContactPoint contactPoint: patient.getTelecom()) { if (contactPoint.hasId()) { numbersFromCsv.add(contactPoint.getValue()); } } for (String numberFromCsv: numbersFromCsv) { PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv); } } List<HumanName> namesFromCsv = new ArrayList<>(); if (patient.hasName()) { for (HumanName name: patient.getName()) { if (name.hasId()) { namesFromCsv.add(name); } } for (HumanName name: namesFromCsv) { PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name); } } List<Address> addressesFromCsv = new ArrayList<>(); if (patient.hasAddress()) { for (Address address: patient.getAddress()) { if (address.hasId()) { addressesFromCsv.add(address); } } for (Address address: addressesFromCsv) { 
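// as with the telecom and name handling above, any address that carries an ID came from the CSV,
// so use it to strip out matching addresses without an ID (duplicates left by earlier transforms)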
PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address); } } String newJson = FhirSerializationHelper.serializeResource(patient); if (!newJson.equals(oldJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } checked ++; if (checked % 1000 == 0) { LOG.debug("Checked " + checked + " fixed " + fixed); } } LOG.debug("Checked " + checked + " fixed " + fixed); rs.close(); s.close(); edsEntityManager.close(); LOG.debug("Finish Fixing Barts patients at service " + serviceId); } catch (Throwable t) { LOG.error("", t); } } private static void postToRabbit(String exchangeName, String srcFile) { LOG.info("Posting to " + exchangeName + " from " + srcFile); try { List<UUID> exchangeIds = new ArrayList<>(); List<String> lines = Files.readAllLines(new File(srcFile).toPath()); for (String line: lines) { if (!Strings.isNullOrEmpty(line)) { try { UUID uuid = UUID.fromString(line); exchangeIds.add(uuid); } catch (Exception ex) { LOG.error("Skipping line " + line); } } } LOG.info("Found " + exchangeIds.size() + " to post to " + exchangeName); continueOrQuit(); LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName); QueueHelper.postToExchange(exchangeIds, exchangeName, null, true); LOG.info("Finished Posting to " + exchangeName+ " from " + srcFile); } catch (Throwable t) { LOG.error("", t); } } /*private static void postToProtocol(String srcFile) { LOG.info("Posting to protocol from " + srcFile); try { List<UUID> exchangeIds = new ArrayList<>(); List<String> lines = Files.readAllLines(new File(srcFile).toPath()); for (String line: lines) { if (!Strings.isNullOrEmpty(line)) { UUID uuid = UUID.fromString(line); exchangeIds.add(uuid); } } LOG.info("Posting " + exchangeIds.size() + " to Protocol queue"); QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false); LOG.info("Finished Posting to protocol from " + srcFile); } catch (Throwable t) { LOG.error("", t); } }*/ private static void populateSubscriberUprnTable(String subscriberConfigName) throws Exception { LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); //changed the format of the JSON JsonNode pseudoNode = config.get("pseudonymisation"); boolean pseudonymised = pseudoNode != null; byte[] saltBytes = null; if (pseudonymised) { JsonNode saltNode = pseudoNode.get("salt"); String base64Salt = saltNode.asText(); saltBytes = Base64.getDecoder().decode(base64Salt); } /*boolean pseudonymised = config.get("pseudonymised").asBoolean(); byte[] saltBytes = null; if (pseudonymised) { JsonNode saltNode = config.get("salt"); String base64Salt = saltNode.asText(); saltBytes = Base64.getDecoder().decode(base64Salt); }*/ Connection subscriberConnection = EnterpriseFiler.openConnection(config); String upsertSql; if (pseudonymised) { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " pseudo_uprn = VALUES(pseudo_uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " 
missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)"; } else { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " uprn = VALUES(uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)"; } PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql); int inBatch = 0; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); EnterpriseIdDalI enterpriseIdDal = DalProvider.factoryEnterpriseIdDal(subscriberConfigName); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal(); int checked = 0; int saved = 0; String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { int col = 1; String serviceId = rs.getString(col++); String patientId = rs.getString(col++); Long uprn = rs.getLong(col++); if (rs.wasNull()) { uprn = null; } String qualifier = rs.getString(col++); String abpAddress = rs.getString(col++); String algorithm = rs.getString(col++); String match = rs.getString(col++); boolean noAddress = rs.getBoolean(col++); boolean invalidAddress = rs.getBoolean(col++); boolean missingPostcode = rs.getBoolean(col++); boolean invalidPostcode = rs.getBoolean(col++); //check if patient ID already exists in the subscriber DB Long subscriberPatientId = enterpriseIdDal.findEnterpriseId(ResourceType.Patient.toString(), patientId); //if the patient doesn't exist on this subscriber DB, then don't transform this record if (subscriberPatientId != null) { Long subscriberOrgId = enterpriseIdDal.findEnterpriseOrganisationId(serviceId); String discoveryPersonId = patientLinkDal.getPersonId(patientId); Long subscriberPersonId = enterpriseIdDal.findOrCreateEnterprisePersonId(discoveryPersonId); String lsoaCode = null; if (!Strings.isNullOrEmpty(abpAddress)) { String[] toks = abpAddress.split(" "); String postcode = toks[toks.length - 1]; PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode); if (postcodeReference != null) { lsoaCode = postcodeReference.getLsoaCode(); } } col = 1; psUpsert.setLong(col++, subscriberPatientId); psUpsert.setLong(col++, subscriberOrgId); psUpsert.setLong(col++, subscriberPersonId); psUpsert.setString(col++, lsoaCode); if (pseudonymised) { String pseuoUprn = null; if (uprn != null) { TreeMap<String, String> keys = new TreeMap<>(); keys.put("UPRN", "" + uprn); Crypto crypto = new Crypto(); crypto.SetEncryptedSalt(saltBytes); pseuoUprn = crypto.GetDigest(keys); } psUpsert.setString(col++, pseuoUprn); } else { 
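// not pseudonymised, so write the raw UPRN (or SQL NULL if we don't have one) to the uprn column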
if (uprn != null) { psUpsert.setLong(col++, uprn.longValue()); } else { psUpsert.setNull(col++, Types.BIGINT); } } psUpsert.setString(col++, qualifier); psUpsert.setString(col++, algorithm); psUpsert.setString(col++, match); psUpsert.setBoolean(col++, noAddress); psUpsert.setBoolean(col++, invalidAddress); psUpsert.setBoolean(col++, missingPostcode); psUpsert.setBoolean(col++, invalidPostcode); //LOG.debug("" + psUpsert); psUpsert.addBatch(); inBatch++; saved++; if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) { psUpsert.executeBatch(); subscriberConnection.commit(); inBatch = 0; } } checked ++; if (checked % 1000 == 0) { LOG.info("Checked " + checked + " Saved " + saved); } } if (inBatch > 0) { psUpsert.executeBatch(); subscriberConnection.commit(); } LOG.info("Chcked " + checked + " Saved " + saved); psUpsert.close(); subscriberConnection.close(); edsEntityManager.close(); LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName); } catch (Throwable t) { LOG.error("", t); } } private static void fixPersonsNoNhsNumber() { LOG.info("Fixing persons with no NHS number"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection patientSearchConnection = session.connection(); Statement patientSearchStatement = patientSearchConnection.createStatement(); for (Service service: services) { LOG.info("Doing " + service.getName() + " " + service.getId()); int checked = 0; int fixedPersons = 0; int fixedSearches = 0; String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)"; ResultSet rs = patientSearchStatement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String nhsNumber = rs.getString(2); //find matched person ID String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'"; Statement s = patientSearchConnection.createStatement(); ResultSet rsPersonId = s.executeQuery(personIdSql); String personId = null; if (rsPersonId.next()) { personId = rsPersonId.getString(1); } rsPersonId.close(); s.close(); if (Strings.isNullOrEmpty(personId)) { LOG.error("Patient " + patientId + " has no person ID"); continue; } //see whether person ID used NHS number to match String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'"; s = patientSearchConnection.createStatement(); ResultSet rsPatientLink = s.executeQuery(patientLinkSql); String matchingNhsNumber = null; if (rsPatientLink.next()) { matchingNhsNumber = rsPatientLink.getString(1); } rsPatientLink.close(); s.close(); //if patient link person has a record for this nhs number, update the person link if (!Strings.isNullOrEmpty(matchingNhsNumber)) { String newPersonId = UUID.randomUUID().toString(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String createdAtStr = sdf.format(new Date()); s = patientSearchConnection.createStatement(); //new record in patient link history String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')"; //LOG.debug(patientHistorySql); s.execute(patientHistorySql); //update patient link String patientLinkUpdateSql = "UPDATE 
patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'"; s.execute(patientLinkUpdateSql); patientSearchConnection.commit(); s.close(); fixedPersons ++; } //if patient search has an invalid NHS number, update it if (!Strings.isNullOrEmpty(nhsNumber)) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); patientSearchDal.update(service.getId(), patient); fixedSearches ++; } checked ++; if (checked % 50 == 0) { LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); } } LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); rs.close(); } patientSearchStatement.close(); entityManager.close(); LOG.info("Finished fixing persons with no NHS number"); } catch (Throwable t) { LOG.error("", t); } } private static void checkDeletedObs(UUID serviceId, UUID systemId) { LOG.info("Checking Observations for " + serviceId); try { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); List<String> subscriberConfigs = new ArrayList<>(); subscriberConfigs.add("ceg_data_checking"); subscriberConfigs.add("ceg_enterprise"); subscriberConfigs.add("hurley_data_checking"); subscriberConfigs.add("hurley_deidentified"); Set<String> observationsNotDeleted = new HashSet<>(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile firstItem = payload.get(0); //String version = EmisCsvToFhirTransformer.determineVersion(payload); //if we've reached the point before we process data for this practice, break out try { if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) { break; } } catch (TransformException e) { LOG.info("Skipping exchange containing " + firstItem.getPath()); continue; } String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch : batches) { if (batch.getEdsPatientId() != null) { hmBatchesByPatient.put(batch.getEdsPatientId(), batch); } } for (ExchangePayloadFile item : payload) { String type = item.getType(); if 
(type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String deleted = record.get("Deleted"); String observationId = record.get("ObservationGuid"); if (deleted.equalsIgnoreCase("true")) { //if observation was reinstated at some point, skip it if (observationsNotDeleted.contains(observationId)) { continue; } String patientId = record.get("PatientGuid"); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId); Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell); for (ResourceType resourceType: resourceTypes) { //will already have been done OK if (resourceType == ResourceType.Observation) { continue; } String sourceId = patientId + ":" + observationId; UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId); } LOG.debug("Fixing " + resourceType + " " + uuid); //create file of IDs to delete for each subscriber DB for (String subscriberConfig : subscriberConfigs) { EnterpriseIdDalI subscriberDal = DalProvider.factoryEnterpriseIdDal(subscriberConfig); Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString()); if (enterpriseId == null) { continue; } String sql = null; if (resourceType == ResourceType.AllergyIntolerance) { sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId; } else if (resourceType == ResourceType.ReferralRequest) { sql = "DELETE FROM referral_request WHERE id = " + enterpriseId; } else { sql = "DELETE FROM observation WHERE id = " + enterpriseId; } sql += "\n"; File f = new File(subscriberConfig + ".sql"); Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } //delete resource if not already done ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (resourceWrapper != null && !resourceWrapper.isDeleted()) { ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId()); resourceWrapper.setDeleted(true); resourceWrapper.setResourceData(null); resourceWrapper.setResourceMetadata(""); resourceWrapper.setExchangeBatchId(batch.getBatchId()); resourceWrapper.setVersion(UUID.randomUUID()); resourceWrapper.setCreatedAt(new Date()); resourceWrapper.setExchangeId(exchange.getId()); resourceDal.delete(resourceWrapper); } } } else { observationsNotDeleted.add(observationId); } } parser.close(); } } } LOG.info("Finished Checking Observations for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) { LOG.info("Testing Batch Inserts"); try { int inserts = Integer.parseInt(num); int batchSize = Integer.parseInt(batchSizeStr); LOG.info("Openning Connection"); Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); //String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);"; String sql = "INSERT 
INTO drewtest.insert_test VALUES (?, ?, ?)"; PreparedStatement ps = conn.prepareStatement(sql); if (batchSize == 1) { LOG.info("Testing non-batched inserts"); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.execute(); } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } else { LOG.info("Testing batched inserts with batch size " + batchSize); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.addBatch(); if ((i + 1) % batchSize == 0 || i + 1 >= inserts) { ps.executeBatch(); } } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } ps.close(); conn.close(); LOG.info("Finished Testing Batch Inserts"); } catch (Exception ex) { LOG.error("", ex); } } private static String randomStr() { StringBuffer sb = new StringBuffer(); Random r = new Random(System.currentTimeMillis()); while (sb.length() < 1100) { sb.append(r.nextLong()); } return sb.toString(); } /*private static void fixEmisProblems(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems for " + serviceId); try { Map<String, List<String>> hmReferences = new HashMap<>(); Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Caching problem links"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); //String version = EmisCsvToFhirTransformer.determineVersion(payload); ExchangePayloadFile firstItem = payload.get(0); String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("ObservationGuid"); String localId = patientId + ":" + observationId; ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId)); Reference localReference = ReferenceHelper.createReference(resourceType, localId); Reference globalReference = 
IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_DrugRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("DrugRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_IssueRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("IssueRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } 
problemChildren.add(globalReference.getReference()); } } parser.close(); } else { //no problem link } } } LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString()); for (ResourceWrapper wrapper: wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } //sort out the contained list of children ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder); //remove any existing children listBuilder.removeContainedList(); //add all the new ones we've found List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString()); if (localChildReferences != null) { for (String localChildReference: localChildReferences) { Reference reference = ReferenceHelper.createReference(localChildReference); listBuilder.addContainedListItem(reference); } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(condition); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) { try { LOG.info("Doing fix for " + publisher); String[] done = new String[]{ "01fcfe94-5dfd-4951-b74d-129f874209b0", "07a267d3-189b-4968-b9b0-547de28edef5", "0b9601d1-f7ab-4f5d-9f77-1841050f75ab", "0fd2ff5d-2c25-4707-afe8-707e81a250b8", "14276da8-c344-4841-a36d-aa38940e78e7", "158251ca-0e1d-4471-8fae-250b875911e1", "160131e2-a5ff-49c8-b62e-ae499a096193", "16490f2b-62ce-44c6-9816-528146272340", "18fa1bed-b9a0-4d55-a0cc-dfc31831259a", "19cba169-d41e-424a-812f-575625c72305", "19ff6a03-25df-4e61-9ab1-4573cfd24729", "1b3d1627-f49e-4103-92d6-af6016476da3", "1e198fbb-c9cd-429a-9b50-0f124d0d825c", "20444fbe-0802-46fc-8203-339a36f52215", "21e27bf3-8071-48dd-924f-1d8d21f9216f", "23203e72-a3b0-4577-9942-30f7cdff358e", "23be1f4a-68ec-4a49-b2ec-aa9109c99dcd", 
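                    //services listed in this array are skipped by the alreadyDone check below (treated as already fixed)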
"2b56033f-a9b4-4bab-bb53-c619bdb38895", "2ba26f2d-8068-4b77-8e62-431edfc2c2e2", "2ed89931-0ce7-49ea-88ac-7266b6c03be0", "3abf8ded-f1b1-495b-9a2d-5d0223e33fa7", "3b0f6720-2ffd-4f8a-afcd-7e3bb311212d", "415b509a-cf39-45bc-9acf-7f982a00e159", "4221276f-a3b0-4992-b426-ec2d8c7347f2", "49868211-d868-4b55-a201-5acac0be0cc0", "55fdcbd0-9b2d-493a-b874-865ccc93a156", "56124545-d266-4da9-ba1f-b3a16edc7f31", "6c11453b-dbf8-4749-a0ec-ab705920e316" }; ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> all = dal.getAll(); for (Service service: all) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { boolean alreadyDone = false; String idStr = service.getId().toString(); for (String doneId: done) { if (idStr.equalsIgnoreCase(doneId)) { alreadyDone = true; break; } } if (alreadyDone) { continue; } fixEmisProblems3(service.getId(), systemId); } } LOG.info("Done fix for " + publisher); } catch (Throwable t) { LOG.error("", t); } } private static void fixEmisProblems3(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems 3 for " + serviceId); try { Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Finding patients"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("Admin_Patient")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientId = record.get("PatientGuid"); patientIds.add(patientId); } parser.close(); } } } LOG.info("Finished checking files, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); for (ResourceType resourceType: potentialResourceTypes) { List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString()); for (ResourceWrapper wrapper : wrappers) { if 
(wrapper.isDeleted()) {
                            continue;
                        }

                        String originalJson = wrapper.getResourceData();
                        DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson);

                        //Also go through all observation records and any that have parent observations - these need fixing too???
                        Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE);
                        if (extension != null) {
                            Reference reference = (Reference)extension.getValue();
                            fixReference(serviceId, filer, reference, potentialResourceTypes);
                        }

                        if (resource instanceof Observation) {
                            Observation obs = (Observation)resource;
                            if (obs.hasRelated()) {
                                for (Observation.ObservationRelatedComponent related: obs.getRelated()) {
                                    if (related.hasTarget()) {
                                        Reference reference = related.getTarget();
                                        fixReference(serviceId, filer, reference, potentialResourceTypes);
                                    }
                                }
                            }
                        }

                        if (resource instanceof DiagnosticReport) {
                            DiagnosticReport diag = (DiagnosticReport)resource;
                            if (diag.hasResult()) {
                                for (Reference reference: diag.getResult()) {
                                    fixReference(serviceId, filer, reference, potentialResourceTypes);
                                }
                            }
                        }

                        //Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save
                        if (resource instanceof Condition) {

                            if (resource.hasContained()) {
                                for (Resource contained: resource.getContained()) {
                                    if (contained.getId().equals("Items")) {
                                        List_ containedList = (List_)contained;
                                        if (containedList.hasEntry()) {
                                            for (List_.ListEntryComponent entry: containedList.getEntry()) {
                                                Reference reference = entry.getItem();
                                                fixReference(serviceId, filer, reference, potentialResourceTypes);
                                            }
                                        }
                                    }
                                }
                            }

                            //sort out the nested extension references
                            Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED);
                            if (outerExtension != null) {
                                Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET);
                                if (innerExtension != null) {
                                    Reference performerReference = (Reference)innerExtension.getValue();
                                    String value = performerReference.getReference();
                                    if (value.endsWith("}")) {
                                        Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
                                        innerExtension.setValue(globalPerformerReference);
                                    }
                                }
                            }
                        }

                        //save the updated condition
                        String newJson = FhirSerializationHelper.serializeResource(resource);
                        if (!newJson.equals(originalJson)) {
                            wrapper.setResourceData(newJson);
                            saveResourceWrapper(serviceId, wrapper);
                            fixed++;
                        }
                    }
                }

                done ++;
                if (done % 1000 == 0) {
                    LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
                }
            }

            LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
            LOG.info("Finished Emis Problems 3 for " + serviceId);

        } catch (Exception ex) {
            LOG.error("", ex);
        }
    }

    private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception {

        //if it's already something other than observation, we're OK
        ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference);
        if (comps.getResourceType() != ResourceType.Observation) {
            return false;
        }

        Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference);
        String sourceId = ReferenceHelper.getReferenceId(sourceReference);

        String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId);
        if (newReferenceValue == null) {
            return false;
        }
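        //the record was actually saved under one of the other candidate resource types, so repoint the reference at it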
reference.setReference(newReferenceValue); return true; } private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception { ResourceDalI dal = DalProvider.factoryResourceDal(); for (ResourceType resourceType: potentials) { UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { continue; } ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (wrapper != null) { return ReferenceHelper.createResourceReference(resourceType, uuid.toString()); } } return null; } /*private static void convertExchangeBody(UUID systemUuid) { try { LOG.info("Converting exchange bodies for system " + systemUuid); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE); if (exchanges.isEmpty()) { continue; } LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges"); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); try { //already done ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class); continue; } catch (JsonSyntaxException ex) { //if the JSON can't be parsed, then it'll be the old format of body that isn't JSON } List<ExchangePayloadFile> newFiles = new ArrayList<>(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (String file: files) { ExchangePayloadFile fileObj = new ExchangePayloadFile(); String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1); fileObj.setPath(fileWithoutSharedStorage); //size List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file); for (FileInfo info: fileInfos) { if (info.getFilePath().equals(file)) { long size = info.getSize(); fileObj.setSize(new Long(size)); } } //type if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live || systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev //emis String name = FilenameUtils.getName(file); String[] toks = name.split("_"); String first = toks[1]; String second = toks[2]; fileObj.setType(first + "_" + second); *//* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095") || systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev //cerner String name = FilenameUtils.getName(file); if (Strings.isNullOrEmpty(name)) { continue; } try { String type = BartsCsvToFhirTransformer.identifyFileType(name); fileObj.setType(type); } catch (Exception ex2) { throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId()); }*//* } else { throw new Exception("Unknown system ID " + systemUuid); } newFiles.add(fileObj); } String json = JsonSerializer.serialize(newFiles); exchange.setBody(json); exchangeDal.save(exchange); } } LOG.info("Finished Converting exchange bodies for system " + systemUuid); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixBartsOrgs(String serviceId) { try { LOG.info("Fixing Barts orgs"); ResourceDalI dal = DalProvider.factoryResourceDal(); List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString()); 
LOG.debug("Found " + wrappers.size() + " resources"); int done = 0; int fixed = 0; for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId()); ResourceWrapper mostRecent = history.get(0); String json = mostRecent.getResourceData(); Organization org = (Organization)FhirSerializationHelper.deserializeResource(json); String odsCode = IdentifierHelper.findOdsCode(org); if (Strings.isNullOrEmpty(odsCode) && org.hasIdentifier()) { boolean hasBeenFixed = false; for (Identifier identifier: org.getIdentifier()) { if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE) && identifier.hasId()) { odsCode = identifier.getId(); identifier.setValue(odsCode); identifier.setId(null); hasBeenFixed = true; } } if (hasBeenFixed) { String newJson = FhirSerializationHelper.serializeResource(org); mostRecent.setResourceData(newJson); LOG.debug("Fixed Organization " + org.getId()); *//*LOG.debug(json); LOG.debug(newJson);*//* saveResourceWrapper(UUID.fromString(serviceId), mostRecent); fixed ++; } } } done ++; if (done % 100 == 0) { LOG.debug("Done " + done + ", Fixed " + fixed); } } LOG.debug("Done " + done + ", Fixed " + fixed); LOG.info("Finished Barts orgs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testPreparedStatements(String url, String user, String pass, String serviceId) { try { LOG.info("Testing Prepared Statements"); LOG.info("Url: " + url); LOG.info("user: " + user); LOG.info("pass: " + pass); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? 
AND source_id = ?"; long start = System.currentTimeMillis(); for (int i=0; i<10000; i++) { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); ps.setString(1, serviceId); ps.setString(2, "MILLPERSIDtoMRN"); ps.setString(3, UUID.randomUUID().toString()); ResultSet rs = ps.executeQuery(); while (rs.next()) { //do nothing } } finally { if (ps != null) { ps.close(); } } } long end = System.currentTimeMillis(); LOG.info("Took " + (end-start) + " ms"); //close connection conn.close(); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEncounters(String table) { LOG.info("Fixing encounters from " + table); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); Date cutoff = sdf.parse("2018-03-14 11:42"); EntityManager entityManager = ConnectionManager.getAdminEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<UUID> serviceIds = new ArrayList<>(); Map<UUID, UUID> hmSystems = new HashMap<>(); String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { UUID serviceId = UUID.fromString(rs.getString(1)); UUID systemId = UUID.fromString(rs.getString(2)); serviceIds.add(serviceId); hmSystems.put(serviceId, systemId); } rs.close(); statement.close(); entityManager.close(); for (UUID serviceId: serviceIds) { UUID systemId = hmSystems.get(serviceId); LOG.info("Doing service " + serviceId + " and system " + systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId); List<UUID> exchangeIdsToProcess = new ArrayList<>(); for (UUID exchangeId: exchangeIds) { List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { Date d = audit.getStarted(); if (d.after(cutoff)) { exchangeIdsToProcess.add(exchangeId); break; } } } Map<String, ReferenceList> consultationNewChildMap = new HashMap<>(); Map<String, ReferenceList> observationChildMap = new HashMap<>(); Map<String, ReferenceList> newProblemChildren = new HashMap<>(); for (UUID exchangeId: exchangeIdsToProcess) { Exchange exchange = exchangeDal.getExchange(exchangeId); String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody()); String version = EmisCsvToFhirTransformer.determineVersion(files); List<String> interestingFiles = new ArrayList<>(); for (String file: files) { if (file.indexOf("CareRecord_Consultation") > -1 || file.indexOf("CareRecord_Observation") > -1 || file.indexOf("CareRecord_Diary") > -1 || file.indexOf("Prescribing_DrugRecord") > -1 || file.indexOf("Prescribing_IssueRecord") > -1 || file.indexOf("CareRecord_Problem") > -1) { interestingFiles.add(file); } } files = interestingFiles.toArray(new String[0]); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true); Consultation consultationParser = (Consultation)parsers.get(Consultation.class); while (consultationParser.nextRecord()) { CsvCell consultationGuid = consultationParser.getConsultationGuid(); CsvCell 
patientGuid = consultationParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); consultationNewChildMap.put(sourceId, new ReferenceList()); } Problem problemParser = (Problem)parsers.get(Problem.class); while (problemParser.nextRecord()) { CsvCell problemGuid = problemParser.getObservationGuid(); CsvCell patientGuid = problemParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); newProblemChildren.put(sourceId, new ReferenceList()); } //run this pre-transformer to pre-cache some stuff in the csv helper, which //is needed when working out the resource type that each observation would be saved as ObservationPreTransformer.transform(version, parsers, null, csvHelper); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { CsvCell observationGuid = observationParser.getObservationGuid(); CsvCell patientGuid = observationParser.getPatientGuid(); String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid); CsvCell codeId = observationParser.getCodeId(); if (codeId.isEmpty()) { continue; } ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId); if (obUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId); //resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); } Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString()); CsvCell consultationGuid = observationParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell problemGuid = observationParser.getProblemGuid(); if (!problemGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell parentObGuid = observationParser.getParentObservationGuid(); if (!parentObGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid); ReferenceList referenceList = observationChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); observationChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } } Diary diaryParser = (Diary)parsers.get(Diary.class); while (diaryParser.nextRecord()) { CsvCell consultationGuid = diaryParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { CsvCell diaryGuid = diaryParser.getDiaryGuid(); CsvCell patientGuid = diaryParser.getPatientGuid(); String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid); UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId); if (diaryUuid == null) { continue; //LOG.error("Null observation 
UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId); } Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(diaryReference); } } IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class); while (issueRecordParser.nextRecord()) { CsvCell problemGuid = issueRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid(); CsvCell patientGuid = issueRecordParser.getPatientGuid(); String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid); UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId); if (issueRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId); } Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(issueRecordReference); } } DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class); while (drugRecordParser.nextRecord()) { CsvCell problemGuid = drugRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid(); CsvCell patientGuid = drugRecordParser.getPatientGuid(); String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid); UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId); if (drugRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId); } Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(drugRecordReference); } } for (AbstractCsvParser parser : parsers.values()) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix"); for (String encounterSourceId: consultationNewChildMap.keySet()) { ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId); //map to UUID UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId); if (encounterId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId); if 
(history.isEmpty()) { continue; //throw new Exception("Empty history for Encounter " + encounterId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(encounter); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix"); for (String sourceId: observationChildMap.keySet()) { ReferenceList childReferences = observationChildMap.get(sourceId); //map to UUID ResourceType resourceType = null; UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId); if (resourceId != null) { resourceType = ResourceType.Observation; } else { resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId); if (resourceId != null) { resourceType = ResourceType.DiagnosticReport; } else { continue; } } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId); if (history.isEmpty()) { //throw new Exception("Empty history for " + resourceType + " " + resourceId); continue; } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (resourceType == ResourceType.Observation) { if (wrapper.getResourceData() != null) { Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (observation.hasRelated()) { for (Observation.ObservationRelatedComponent related : observation.getRelated()) { Reference reference = related.getTarget(); childReferences.add(reference); } } } } else { if (wrapper.getResourceData() != null) { DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (report.hasResult()) { for (Reference reference : 
report.getResult()) { childReferences.add(reference); } } } } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData()); boolean changed = false; if (resourceType == ResourceType.Observation) { ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addChildObservation(reference)) { changed = true; } } } else { DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addResult(reference)) { changed = true; } } } if (changed) { String newJson = FhirSerializationHelper.serializeResource(resource); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); }*//* } LOG.info("Found " + newProblemChildren.size() + " Problems to fix"); for (String sourceId: newProblemChildren.keySet()) { ReferenceList childReferences = newProblemChildren.get(sourceId); //map to UUID UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId); if (conditionId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Condition " + conditionId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); containedListBuilder.addReferences(childReferences); String newJson = 
FhirSerializationHelper.serializeResource(condition); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } //mark as done String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';"; entityManager = ConnectionManager.getAdminEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); } *//** * For each practice: Go through all files processed since 14 March Cache all links as above Cache all Encounters saved too For each Encounter referenced at all: Retrieve latest version from resource current Retrieve version prior to 14 March Update current version with old references plus new ones For each parent observation: Retrieve latest version (could be observation or diagnostic report) For each problem: Retrieve latest version from resource current Check if still a problem: Retrieve version prior to 14 March Update current version with old references plus new ones *//* LOG.info("Finished Fixing encounters from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception { if (wrapper.getVersion() == null) { throw new Exception("Can't update resource history without version UUID"); } if (wrapper.getResourceData() != null) { long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData()); wrapper.setResourceChecksum(new Long(checksum)); } EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); entityManager.getTransaction().begin(); String json = wrapper.getResourceData(); json = json.replace("'", "''"); json = json.replace("\\", "\\\\"); String patientId = ""; if (wrapper.getPatientId() != null) { patientId = wrapper.getPatientId().toString(); } String updateSql = "UPDATE resource_current" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE service_id = '" + wrapper.getServiceId() + "'" + " AND patient_id = '" + patientId + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" + " AND resource_id = '" + wrapper.getResourceId() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); //SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS"); //String createdAtStr = sdf.format(wrapper.getCreatedAt()); updateSql = "UPDATE resource_history" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE resource_id = '" + wrapper.getResourceId() + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" //+ " AND created_at = '" + createdAtStr + "'" + " AND version = '" + wrapper.getVersion() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); entityManager.getTransaction().commit(); } /*private static void populateNewSearchTable(String table) { LOG.info("Populating New Search Table"); try { EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = 
connection.createStatement(); List<String> patientIds = new ArrayList<>(); Map<String, String> serviceIds = new HashMap<>(); String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String serviceId = rs.getString(2); patientIds.add(patientId); serviceIds.put(patientId, serviceId); } rs.close(); statement.close(); entityManager.close(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearch2Dal(); LOG.info("Found " + patientIds.size() + " to do"); for (int i=0; i<patientIds.size(); i++) { String patientIdStr = patientIds.get(i); UUID patientId = UUID.fromString(patientIdStr); String serviceIdStr = serviceIds.get(patientIdStr); UUID serviceId = UUID.fromString(serviceIdStr); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr); if (patient != null) { patientSearchDal.update(serviceId, patient); //find episode of care List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, null, patientId, ResourceType.EpisodeOfCare.toString()); for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); patientSearchDal.update(serviceId, episodeOfCare); } } } String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';"; entityManager = ConnectionManager.getEdsEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); if (i % 5000 == 0) { LOG.info("Done " + (i+1) + " of " + patientIds.size()); } } entityManager.close(); LOG.info("Finished Populating New Search Table"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) { LOG.info("Creating Barts Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds); LOG.info("Finished Creating Barts Subset"); } catch (Throwable t) { LOG.error("", t); } } /*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { for (File sourceFile: sourceDir.listFiles()) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } LOG.info("Doing dir " + sourceFile); createBartsSubsetForFile(sourceFile, destFile, personIds); } else { //we have some bad partial files in, so ignore them String ext = FilenameUtils.getExtension(name); if (ext.equalsIgnoreCase("filepart")) { continue; } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String baseName = 
FilenameUtils.getBaseName(name); String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex ++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i=0; i<expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if (personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception { ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange: exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile fileObj : files) { String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length()+1); String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage); File sourceFile = new File(sourceFilePath); String destFilePath = fileObj.getPath(); File destFile = new File(destFilePath); File destDir = destFile.getParentFile(); if (!destDir.exists()) { destDir.mkdirs(); } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String fileType = fileObj.getType(); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int 
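//index of the PersonID column; stays -1 until it has been located in the header row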
personIdColIndex = -1;
                    int expectedCols = -1;

                    while (true) {
                        String line = br.readLine();
                        if (line == null) {
                            break;
                        }

                        lineIndex++;

                        if (lineIndex == 0) {
                            if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
                                //this file has no headers, so needs hard-coding
                                personIdColIndex = 5;

                            } else {
                                //check headings for PersonID col
                                String[] toks = line.split("\\|", -1);
                                expectedCols = toks.length;

                                for (int i = 0; i < expectedCols; i++) {
                                    String col = toks[i];
                                    if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) {
                                        personIdColIndex = i;
                                        break;
                                    }
                                }

                                //if no person ID, then just copy the entire file
                                if (personIdColIndex == -1) {
                                    br.close();
                                    br = null;

                                    LOG.info(" Copying 2.2 file to " + destFile);
                                    copyFile(sourceFile, destFile);
                                    break;

                                } else {
                                    LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
                                }
                            }

                            PrintWriter fw = new PrintWriter(destFile);
                            BufferedWriter bw = new BufferedWriter(fw);
                            pw = new PrintWriter(bw);

                        } else {
                            //filter on personID
                            String[] toks = line.split("\\|", -1);
                            if (expectedCols != -1 && toks.length != expectedCols) {
                                throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols);

                            } else {
                                String personId = toks[personIdColIndex];
                                if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
                                        && !personIds.contains(personId)) {
                                    continue;
                                }
                            }
                        }

                        pw.println(line);
                    }

                    if (br != null) {
                        br.close();
                    }
                    if (pw != null) {
                        pw.flush();
                        pw.close();
                    }

                } else {
                    //the 2.1 files are going to be a pain to split by patient, so just copy them over
                    LOG.info("Copying 2.1 file " + sourceFile);
                    if (!destFile.exists()) {
                        copyFile(sourceFile, destFile);
                    }
                }
            }
        }
    }

    private static void copyFile(File src, File dst) throws Exception {
        FileInputStream fis = new FileInputStream(src);
        BufferedInputStream bis = new BufferedInputStream(fis);
        Files.copy(bis, dst.toPath());
        bis.close();
    }

    private static boolean isCerner22File(String fileType) throws Exception {
        if (fileType.equalsIgnoreCase("PPATI") || fileType.equalsIgnoreCase("PPREL") || fileType.equalsIgnoreCase("CDSEV") || fileType.equalsIgnoreCase("PPATH") ||
                fileType.equalsIgnoreCase("RTTPE") || fileType.equalsIgnoreCase("AEATT") || fileType.equalsIgnoreCase("AEINV") || fileType.equalsIgnoreCase("AETRE") ||
                fileType.equalsIgnoreCase("OPREF") || fileType.equalsIgnoreCase("OPATT") || fileType.equalsIgnoreCase("EALEN") || fileType.equalsIgnoreCase("EALSU") ||
                fileType.equalsIgnoreCase("EALOF") || fileType.equalsIgnoreCase("HPSSP") || fileType.equalsIgnoreCase("IPEPI") || fileType.equalsIgnoreCase("IPWDS") ||
                fileType.equalsIgnoreCase("DELIV") || fileType.equalsIgnoreCase("BIRTH") || fileType.equalsIgnoreCase("SCHAC") || fileType.equalsIgnoreCase("APPSL") ||
                fileType.equalsIgnoreCase("DIAGN") || fileType.equalsIgnoreCase("PROCE") || fileType.equalsIgnoreCase("ORDER") || fileType.equalsIgnoreCase("DOCRP") ||
                fileType.equalsIgnoreCase("DOCREF") || fileType.equalsIgnoreCase("CNTRQ") || fileType.equalsIgnoreCase("LETRS") || fileType.equalsIgnoreCase("LOREF") ||
                fileType.equalsIgnoreCase("ORGREF") || fileType.equalsIgnoreCase("PRSNLREF") || fileType.equalsIgnoreCase("CVREF") || fileType.equalsIgnoreCase("NOMREF") ||
                fileType.equalsIgnoreCase("EALIP") || fileType.equalsIgnoreCase("CLEVE") || fileType.equalsIgnoreCase("ENCNT") || fileType.equalsIgnoreCase("RESREF") ||
                fileType.equalsIgnoreCase("PPNAM") || fileType.equalsIgnoreCase("PPADD") || fileType.equalsIgnoreCase("PPPHO") ||
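                //2.2 extract types continue below; anything not in this list is treated as a 2.1 file and copied over unfiltered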
fileType.equalsIgnoreCase("PPALI") || fileType.equalsIgnoreCase("PPINF") || fileType.equalsIgnoreCase("PPAGP") || fileType.equalsIgnoreCase("SURCC") || fileType.equalsIgnoreCase("SURCP") || fileType.equalsIgnoreCase("SURCA") || fileType.equalsIgnoreCase("SURCD") || fileType.equalsIgnoreCase("PDRES") || fileType.equalsIgnoreCase("PDREF") || fileType.equalsIgnoreCase("ABREF") || fileType.equalsIgnoreCase("CEPRS") || fileType.equalsIgnoreCase("ORDDT") || fileType.equalsIgnoreCase("STATREF") || fileType.equalsIgnoreCase("STATA") || fileType.equalsIgnoreCase("ENCINF") || fileType.equalsIgnoreCase("SCHDETAIL") || fileType.equalsIgnoreCase("SCHOFFER") || fileType.equalsIgnoreCase("PPGPORG") || fileType.equalsIgnoreCase("FAMILYHISTORY")) { return true; } else { return false; } } /*private static void fixSubscriberDbs() { LOG.info("Fixing Subscriber DBs"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches"); List<UUID> batchIds = new ArrayList<>(); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); batchIds.add(batchId); } String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } LOG.info("Finished Fixing Subscriber DBs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixReferralRequests() { LOG.info("Fixing Referral 
Requests"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; Set<UUID> patientIdsToPost = new HashSet<>(); for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper wrapper: wrappers) { String resourceType = wrapper.getResourceType(); if (!resourceType.equals(ResourceType.ReferralRequest.toString()) || wrapper.isDeleted()) { continue; } String json = wrapper.getResourceData(); ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json); *//*if (!referral.hasServiceRequested()) { continue; } CodeableConcept reason = referral.getServiceRequested().get(0); referral.setReason(reason); referral.getServiceRequested().clear();*//* if (!referral.hasReason()) { continue; } CodeableConcept reason = referral.getReason(); referral.setReason(null); referral.addServiceRequested(reason); json = FhirSerializationHelper.serializeResource(referral); wrapper.setResourceData(json); saveResourceWrapper(serviceId, wrapper); //add to the set of patients we know need sending on to the protocol queue patientIdsToPost.add(patientId); LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId); } //if our patient has just been fixed or was fixed before, post onto the protocol queue if (patientIdsToPost.contains(patientId)) { List<UUID> batchIds = new ArrayList<>(); batchIds.add(batchId); String 
batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } } } LOG.info("Finished Fixing Referral Requests"); } catch (Throwable t) { LOG.error("", t); } }*/ private static void applyEmisAdminCaches() { LOG.info("Applying Emis Admin Caches"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) { LOG.info(" Service not started, so skipping"); continue; } //get exchanges List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); if (exchangeIds.isEmpty()) { LOG.info(" No exchanges found, so skipping"); continue; } UUID firstExchangeId = exchangeIds.get(0); List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId); boolean appliedAdminCache = false; for (ExchangeEvent event: events) { if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) { appliedAdminCache = true; } } if (appliedAdminCache) { LOG.info(" Have already applied admin cache, so skipping"); continue; } Exchange exchange = exchangeDal.getExchange(firstExchangeId); String body = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(body); if (files.length == 0) { LOG.info(" No files in exchange " + firstExchangeId + " so skipping"); continue; } String firstFilePath = files[0]; String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath); } String sharingAgreementGuid = toks[4]; List<UUID> batchIds = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds); EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(), fhirResourceFiler.getExchangeId(), sharingAgreementGuid, true); ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(endpointSystemId); transformAudit.setExchangeId(firstExchangeId); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); LOG.info(" Going to apply admin resource cache"); csvHelper.applyAdminResourceCache(fhirResourceFiler); 
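        //wait for the resource filer to finish saving the cached admin resources before reading
        //back the batch IDs it has populated for this exchange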
fhirResourceFiler.waitToFinish(); for (UUID batchId: batchIds) { LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIds.size())); boolean hadError = false; if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); hadError = true; } exchangeDal.save(transformAudit); //clear down the cache of reference mappings since they won't be of much use for the next Exchange IdHelper.clearCache(); if (hadError) { LOG.error(" <<<<<<Error applying resource cache!"); continue; } //add the event to say we've applied the cache AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache"); //post that ONE new batch ID onto the protocol queue String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Applying Emis Admin Caches"); } catch (Throwable t) { LOG.error("", t); } } /*private static void fixBartsEscapedFiles(String filePath) { LOG.info("Fixing Barts Escaped Files in " + filePath); try { fixBartsEscapedFilesInDir(new File(filePath)); LOG.info("Finished fixing Barts Escaped Files in " + filePath); } catch (Throwable t) { LOG.error("", t); } } /** * fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by * replacing the "delete" extracts with newly generated deltas that can be processed * before the re-bulk is done */ private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) { LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName()); /*File tempDirLast = new File(tempDir, "last"); if (!tempDirLast.exists()) { if (!tempDirLast.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirLast); } tempDirLast.mkdirs(); } File tempDirEmpty = new File(tempDir, "empty"); if (!tempDirEmpty.exists()) { if (!tempDirEmpty.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirEmpty); } tempDirEmpty.mkdirs(); }*/ String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode); File f = new File(tempDir); if (f.exists()) { FileUtils.deleteDirectory(f); } UUID serviceUuid = service.getId(); UUID systemUuid = UUID.fromString(systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); //get all the exchanges, which are returned in reverse order, so reverse for simplicity List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>(); Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>(); //reverse the exchange list and cache the files for each one List<Exchange> exchanges = new ArrayList<>(); for (int i=exchangesDesc.size()-1; i>=0; i--) { Exchange exchange = exchangesDesc.get(i); String exchangeBody = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); //drop 
out and ignore any exchanges containing the singular bespoke reg status files if (files.length <= 1) { continue; } //drop out and ignore any exchanges for the left and dead extracts, since we don't //expect to receive re-bulked data for the dead patients String firstFile = files[0]; if (firstFile.indexOf("LEFT_AND_DEAD") > -1) { continue; } exchanges.add(exchange); //populate the map of the files with the shared storage prefix List<String> fileList = Lists.newArrayList(files); hmExchangeFiles.put(exchange, fileList); //populate a map of the same files without the prefix files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (int j=0; j<files.length; j++) { String file = files[j].substring(sharedStoragePath.length() + 1); files[j] = file; } fileList = Lists.newArrayList(files); hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList); } /*exchanges.sort((o1, o2) -> { Date d1 = o1.getTimestamp(); Date d2 = o2.getTimestamp(); return d1.compareTo(d2); });*/ LOG.info("Found " + exchanges.size() + " exchanges and cached their files"); int indexDisabled = -1; int indexRebulked = -1; int indexOriginallyBulked = -1; //go back through them to find the extract where the re-bulk is and when it was disabled for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { indexDisabled = i; } else { if (indexDisabled == -1) { indexRebulked = i; } else { //if we've found a non-disabled extract older than the disabled ones, //then we've gone far enough back break; } } } //go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled) for (int i=indexDisabled-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { break; } indexOriginallyBulked = i; } if (indexOriginallyBulked > -1) { Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked); LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId()); } if (indexDisabled > -1) { Exchange exchangeDisabled = exchanges.get(indexDisabled); LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId()); } if (indexRebulked > -1) { Exchange exchangeRebulked = exchanges.get(indexRebulked); LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId()); } if (indexDisabled == -1 || indexRebulked == -1 || indexOriginallyBulked == -1) { throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")"); } //continueOrQuit(); Exchange exchangeRebulked = exchanges.get(indexRebulked); List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked); List<String> tempFilesCreated = new ArrayList<>(); Set<String> patientGuidsDeletedOrTooOld = new HashSet<>(); for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if (!isPatientFile(fileType)) { continue; } LOG.info("Doing " + fileType); String guidColumnName = getGuidColumnName(fileType); //find all the guids in the re-bulk Set<String> idsInRebulk = new HashSet<>(); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile); CSVParser csvParser = 
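        //(the parser created below walks the re-bulk file, caching each patient GUID, plus the
        //file-specific row GUID where one exists, so the pre-disable extracts can be checked against it)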
new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); String[] headers = null; try { headers = CsvHelper.getHeaderMapAsArray(csvParser); Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); //get the patient and row guid out of the file and cache in our set String id = record.get("PatientGuid"); if (!Strings.isNullOrEmpty(guidColumnName)) { id += "//" + record.get(guidColumnName); } idsInRebulk.add(id); } } finally { csvParser.close(); } LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile); //create a replacement file for the exchange the service was disabled String replacementDisabledFile = null; Exchange exchangeDisabled = exchanges.get(indexDisabled); List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled); for (String s: disabledFiles) { String disabledFileType = findFileType(s); if (disabledFileType.equals(fileType)) { replacementDisabledFile = FilenameUtils.concat(tempDir, s); File dir = new File(replacementDisabledFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } tempFilesCreated.add(s); LOG.info("Created replacement file " + replacementDisabledFile); } } FileWriter fileWriter = new FileWriter(replacementDisabledFile); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); Set<String> pastIdsProcessed = new HashSet<>(); //now go through all files of the same type PRIOR to the service was disabled //to find any rows that we'll need to explicitly delete because they were deleted while //the extract was disabled for (int i=indexDisabled-1; i>=indexOriginallyBulked; i--) { Exchange exchange = exchanges.get(i); String originalFile = null; List<String> files = hmExchangeFiles.get(exchange); for (String s: files) { String originalFileType = findFileType(s); if (originalFileType.equals(fileType)) { originalFile = s; break; } } if (originalFile == null) { continue; } LOG.info(" Reading " + originalFile); reader = FileHelper.readFileReaderFromSharedStorage(originalFile); csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientGuid = record.get("PatientGuid"); //get the patient and row guid out of the file and cache in our set String uniqueId = patientGuid; if (!Strings.isNullOrEmpty(guidColumnName)) { uniqueId += "//" + record.get(guidColumnName); } //if we're already handled this record in a more recent extract, then skip it if (pastIdsProcessed.contains(uniqueId)) { continue; } pastIdsProcessed.add(uniqueId); //if this ID isn't deleted and isn't in the re-bulk then it means //it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted //from Emis Web while the extract feed was disabled //if the record is deleted, then we won't expect it in the re-bulk boolean deleted = Boolean.parseBoolean(record.get("Deleted")); if (deleted) { //if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes if (fileType.equals("Admin_Patient")) { patientGuidsDeletedOrTooOld.add(patientGuid); } continue; } //if it's not the patient file and we refer to a patient that we know //has been deleted, then skip this row, since we know we're deleting the entire 
patient record if (patientGuidsDeletedOrTooOld.contains(patientGuid)) { continue; } //if the re-bulk contains a record matching this one, then it's OK if (idsInRebulk.contains(uniqueId)) { continue; } //the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago), //so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped if (fileType.equals("Admin_Patient")) { //retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid); if (patientUuid == null) { throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]"); } Patient patientResource = (Patient)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString()); if (patientResource.hasDeceased()) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too EpisodeOfCare episodeResource = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString()); if (episodeResource.hasPeriod() && !PeriodHelper.isActive(episodeResource.getPeriod())) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } } //create a new CSV record, carrying over the GUIDs from the original but marking as deleted String[] newRecord = new String[headers.length]; for (int j=0; j<newRecord.length; j++) { String header = headers[j]; if (header.equals("PatientGuid") || header.equals("OrganisationGuid") || (!Strings.isNullOrEmpty(guidColumnName) && header.equals(guidColumnName))) { String val = record.get(header); newRecord[j] = val; } else if (header.equals("Deleted")) { newRecord[j] = "true"; } else { newRecord[j] = ""; } } csvPrinter.printRecord((Object[])newRecord); csvPrinter.flush(); //log out the raw record that's missing from the original StringBuffer sb = new StringBuffer(); sb.append("Record not in re-bulk: "); for (int j=0; j<record.size(); j++) { if (j > 0) { sb.append(","); } sb.append(record.get(j)); } LOG.info(sb.toString()); } } finally { csvParser.close(); } } csvPrinter.flush(); csvPrinter.close(); //also create a version of the CSV file with just the header and nothing else in for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s: exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals(fileType)) { String emptyTempFile = FilenameUtils.concat(tempDir, s); File dir = new File(emptyTempFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } fileWriter = new FileWriter(emptyTempFile); bufferedWriter = new BufferedWriter(fileWriter); csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); csvPrinter.close(); tempFilesCreated.add(s); LOG.info("Created empty file " + emptyTempFile); } } } } //we also need to copy the restored sharing agreement file to replace all the period it was disabled String rebulkedSharingAgreementFile = null; for (String s: rebulkFiles) { String fileType = findFileType(s); if 
(fileType.equals("Agreements_SharingOrganisation")) { rebulkedSharingAgreementFile = s; } } for (int i=indexDisabled; i<indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s: exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals("Agreements_SharingOrganisation")) { String replacementFile = FilenameUtils.concat(tempDir, s); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile); File replacementFileObj = new File(replacementFile); Files.copy(inputStream, replacementFileObj.toPath()); inputStream.close(); tempFilesCreated.add(s); } } } //create a script to copy the files into S3 List<String> copyScript = new ArrayList<>(); copyScript.add("#!/bin/bash"); copyScript.add(""); for (String s: tempFilesCreated) { String localFile = FilenameUtils.concat(tempDir, s); copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s); } String scriptFile = FilenameUtils.concat(tempDir, "copy.sh"); FileUtils.writeLines(new File(scriptFile), copyScript); LOG.info("Finished - written files to " + tempDir); dumpFileSizes(new File(tempDir)); /*continueOrQuit(); //back up every file where the service was disabled for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { //first download from S3 to the local temp dir InputStream inputStream = FileHelper.readFileFromSharedStorage(file); String fileName = FilenameUtils.getName(file); String tempPath = FilenameUtils.concat(tempDir, fileName); File downloadDestination = new File(tempPath); Files.copy(inputStream, downloadDestination.toPath()); //then write back to S3 in a sub-dir of the original file String backupPath = FilenameUtils.getPath(file); backupPath = FilenameUtils.concat(backupPath, "Original"); backupPath = FilenameUtils.concat(backupPath, fileName); FileHelper.writeFileToSharedStorage(backupPath, downloadDestination); LOG.info("Backed up " + file + " -> " + backupPath); //delete from temp dir downloadDestination.delete(); } } continueOrQuit(); //copy the new CSV files into the dir where it was disabled List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled); for (String disabledFile: disabledFiles) { String fileType = findFileType(disabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected temp file " + f); } FileHelper.writeFileToSharedStorage(disabledFile, f); LOG.info("Copied " + tempFile + " -> " + disabledFile); } continueOrQuit(); //empty the patient files for any extracts while the service was disabled for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange otherExchangeDisabled = exchanges.get(i); List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled); for (String otherDisabledFile: otherDisabledFiles) { String fileType = findFileType(otherDisabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(otherDisabledFile, f); LOG.info("Copied " + tempFile + " -> " + otherDisabledFile); } } 
continueOrQuit(); //copy the content of the sharing agreement file from when it was re-bulked for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File downloadDestination = new File(tempFile); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile); Files.copy(inputStream, downloadDestination.toPath()); tempFilesCreated.add(tempFile); } } //replace the sharing agreement file for all disabled extracts with the non-disabled one for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(file, f); LOG.info("Copied " + tempFile + " -> " + file); } } } LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId); continueOrQuit(); for (String tempFileCreated: tempFilesCreated) { File f = new File(tempFileCreated); if (f.exists()) { f.delete(); } }*/ } catch (Exception ex) { LOG.error("", ex); } } private static void dumpFileSizes(File f) { if (f.isDirectory()) { for (File child: f.listFiles()) { dumpFileSizes(child); } } else { String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length()); LOG.info("" + f + " = " + totalSizeReadable); } } private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception { List<String> files = fileMap.get(exchange); String file = findSharingAgreementFile(files); String name = FilenameUtils.getBaseName(file); String[] toks = name.split("_"); return toks[3]; } private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception { String file = findSharingAgreementFile(files); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); CSVRecord record = iterator.next(); String s = record.get("Disabled"); boolean disabled = Boolean.parseBoolean(s); return disabled; } finally { csvParser.close(); } } private static void continueOrQuit() throws Exception { LOG.info("Enter y to continue, anything else to quit"); byte[] bytes = new byte[10]; System.in.read(bytes); char c = (char)bytes[0]; if (c != 'y' && c != 'Y') { System.out.println("Read " + c); System.exit(1); } } private static String getGuidColumnName(String fileType) { if (fileType.equals("Admin_Patient")) { //patient file just has patient GUID, nothing extra return null; } else if (fileType.equals("CareRecord_Consultation")) { return "ConsultationGuid"; } else if (fileType.equals("CareRecord_Diary")) { return "DiaryGuid"; } else if (fileType.equals("CareRecord_Observation")) { return "ObservationGuid"; } else if (fileType.equals("CareRecord_Problem")) { //there is no separate problem GUID, as it's just a modified observation return "ObservationGuid"; } else if (fileType.equals("Prescribing_DrugRecord")) { return "DrugRecordGuid"; } else if (fileType.equals("Prescribing_IssueRecord")) { return "IssueRecordGuid"; } else { throw new 
IllegalArgumentException(fileType); } } private static String findFileType(String filePath) { String fileName = FilenameUtils.getName(filePath); String[] toks = fileName.split("_"); String domain = toks[1]; String name = toks[2]; return domain + "_" + name; } private static boolean isPatientFile(String fileType) { if (fileType.equals("Admin_Patient") || fileType.equals("CareRecord_Consultation") || fileType.equals("CareRecord_Diary") || fileType.equals("CareRecord_Observation") || fileType.equals("CareRecord_Problem") || fileType.equals("Prescribing_DrugRecord") || fileType.equals("Prescribing_IssueRecord")) { //note the referral file doesn't have a Deleted column, so isn't in this list return true; } else { return false; } } private static String findSharingAgreementFile(List<String> files) throws Exception { for (String file : files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { return file; } } throw new Exception("Failed to find sharing agreement file in " + files.get(0)); } private static void testSlack() { LOG.info("Testing slack"); try { SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader"); LOG.info("Finished testing slack"); } catch (Exception ex) { LOG.error("", ex); } } /*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) { try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); Service service = serviceDalI.getById(serviceId); LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath); FileReader fr = new FileReader(filePath); BufferedReader br = new BufferedReader(fr); int count = 0; List<UUID> exchangeIdBatch = new ArrayList<>(); while (true) { String line = br.readLine(); if (line == null) { break; } UUID exchangeId = UUID.fromString(line); //update the transform audit, so EDS UI knows we've re-queued this exchange ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId); if (audit != null && !audit.isResubmitted()) { audit.setResubmitted(true); auditRepository.save(audit); } count ++; exchangeIdBatch.add(exchangeId); if (exchangeIdBatch.size() >= 1000) { QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false); exchangeIdBatch = new ArrayList<>(); LOG.info("Done " + count); } } if (!exchangeIdBatch.isEmpty()) { QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false); LOG.info("Done " + count); } br.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ /*private static void postToInbound(UUID serviceId, boolean all) { LOG.info("Posting to inbound for " + serviceId); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); Service service = serviceDalI.getById(serviceId); List<UUID> systemIds = findSystemIds(service); UUID systemId = systemIds.get(0); ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId); for (UUID exchangeId: errorState.getExchangeIdsInError()) { //update the transform audit, so EDS UI knows we've re-queued this exchange ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId); //skip any exchange IDs we've already re-queued up to be processed again if (audit.isResubmitted()) { LOG.debug("Not re-posting " + 
audit.getExchangeId() + " as it's already been resubmitted"); continue; } LOG.debug("Re-posting " + audit.getExchangeId()); audit.setResubmitted(true); auditRepository.save(audit); //then re-submit the exchange to Rabbit MQ for the queue reader to pick up QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false); if (!all) { LOG.info("Posted first exchange, so stopping"); break; } } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ private static void fixPatientSearchAllServices(String filterSystemId) { LOG.info("Fixing patient search for all services and system " + filterSystemId); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { fixPatientSearch(service.getId().toString(), filterSystemId); } LOG.info("Finished Fixing patient search for all services and system " + filterSystemId); } catch (Throwable t) { LOG.error("", t); } } private static void fixPatientSearch(String serviceId, String filterSystemId) { LOG.info("Fixing patient search for service " + serviceId); try { UUID serviceUuid = UUID.fromString(serviceId); UUID filterSystemUuid = null; if (!Strings.isNullOrEmpty(filterSystemId)) { filterSystemUuid = UUID.fromString(filterSystemId); } ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Set<UUID> patientsDone = new HashSet<>(); Service service = serviceDal.getById(serviceUuid); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { if (filterSystemUuid != null && !filterSystemUuid.equals(systemId)) { continue; } List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId); LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId); for (UUID exchangeId : exchanges) { List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } if (patientsDone.contains(patientId)) { continue; } patientsDone.add(patientId); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId); if (wrapper != null) { String json = wrapper.getResourceData(); if (!Strings.isNullOrEmpty(json)) { Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json); patientSearchDal.update(serviceUuid, fhirPatient); } } if (patientsDone.size() % 1000 == 0) { LOG.info("Done " + patientsDone.size()); } } } } LOG.info("Done " + patientsDone.size()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished fixing patient search for " + serviceId); } private static void runSql(String host, String username, String password, String sqlFile) { LOG.info("Running SQL on " + host + " from " + sqlFile); Connection conn = null; Statement statement = null; try { File f = new File(sqlFile); if (!f.exists()) { LOG.error("" + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); /*String combined = String.join("\n", lines); LOG.info("Going to run SQL"); LOG.info(combined);*/ //load driver 
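        //(with a JDBC 4+ driver jar on the classpath, this explicit Class.forName registration is
        //optional since the driver is discovered automatically, but it is harmless to keep)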
Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", username); props.setProperty("password", password); conn = DriverManager.getConnection(host, props); LOG.info("Opened connection"); statement = conn.createStatement(); long totalStart = System.currentTimeMillis(); for (String sql: lines) { sql = sql.trim(); if (sql.startsWith("--") || sql.startsWith("/*") || Strings.isNullOrEmpty(sql)) { continue; } LOG.info(""); LOG.info(sql); long start = System.currentTimeMillis(); boolean hasResultSet = statement.execute(sql); long end = System.currentTimeMillis(); LOG.info("SQL took " + (end - start) + "ms"); if (hasResultSet) { while (true) { ResultSet rs = statement.getResultSet(); int cols = rs.getMetaData().getColumnCount(); List<String> colHeaders = new ArrayList<>(); for (int i = 0; i < cols; i++) { String header = rs.getMetaData().getColumnName(i + 1); colHeaders.add(header); } String colHeaderStr = String.join(", ", colHeaders); LOG.info(colHeaderStr); while (rs.next()) { List<String> row = new ArrayList<>(); for (int i = 0; i < cols; i++) { Object o = rs.getObject(i + 1); if (rs.wasNull()) { row.add("<null>"); } else { row.add(o.toString()); } } String rowStr = String.join(", ", row); LOG.info(rowStr); } if (!statement.getMoreResults()) { break; } } } else { int updateCount = statement.getUpdateCount(); LOG.info("Updated " + updateCount + " Row(s)"); } } long totalEnd = System.currentTimeMillis(); LOG.info(""); LOG.info("Total time taken " + (totalEnd - totalStart) + "ms"); } catch (Throwable t) { LOG.error("", t); } finally { if (statement != null) { try { statement.close(); } catch (Exception ex) { } } if (conn != null) { try { conn.close(); } catch (Exception ex) { } } LOG.info("Closed connection"); } LOG.info("Finished Testing DB Size Limit"); } /*private static void fixExchangeBatches() { LOG.info("Starting Fixing Exchange Batches"); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); List<Service> services = serviceDalI.getAll(); for (Service service: services) { LOG.info("Doing " + service.getName()); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId()); for (UUID exchangeId: exchangeIds) { LOG.info(" Exchange " + exchangeId); List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId()); if (resources.isEmpty()) { continue; } ResourceWrapper first = resources.get(0); UUID patientId = first.getPatientId(); if (patientId != null) { exchangeBatch.setEdsPatientId(patientId); exchangeBatchDalI.save(exchangeBatch); LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId()); } } } } LOG.info("Finished Fixing Exchange Batches"); } catch (Exception ex) { LOG.error("", ex); } }*/ /** * exports ADT Encounters for patients based on a CSV file produced using the below SQL --USE EDS DATABASE -- barts b5a08769-cbbe-4093-93d6-b696cd1da483 -- homerton 962d6a9a-5950-47ac-9e16-ebee56f9507a create table adt_patients ( service_id character(36), system_id character(36), nhs_number character varying(10), patient_id 
character(36) ); -- delete from adt_patients; select * from patient_search limit 10; select * from patient_link limit 10; insert into adt_patients select distinct ps.service_id, ps.system_id, ps.nhs_number, ps.patient_id from patient_search ps join patient_link pl on pl.patient_id = ps.patient_id join patient_link pl2 on pl.person_id = pl2.person_id join patient_search ps2 on ps2.patient_id = pl2.patient_id where ps.service_id IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a') and ps2.service_id NOT IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a'); select count(1) from adt_patients limit 100; select * from adt_patients limit 100; ---MOVE TABLE TO HL7 RECEIVER DB select count(1) from adt_patients; -- top 1000 patients with messages select * from mapping.resource_uuid where resource_type = 'Patient' limit 10; select * from log.message limit 10; create table adt_patient_counts ( nhs_number character varying(100), count int ); insert into adt_patient_counts select pid1, count(1) from log.message where pid1 is not null and pid1 <> '' group by pid1; select * from adt_patient_counts order by count desc limit 100; alter table adt_patients add count int; update adt_patients set count = adt_patient_counts.count from adt_patient_counts where adt_patients.nhs_number = adt_patient_counts.nhs_number; select count(1) from adt_patients where nhs_number is null; select * from adt_patients where nhs_number is not null and count is not null order by count desc limit 1000; */ /*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) { LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath); try { File sourceFile = new File(sourceCsvPath); CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); //"service_id","system_id","nhs_number","patient_id","count" int count = 0; HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>(); HashMap<UUID, Integer> patientIds = new HashMap<>(); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); count ++; String serviceId = csvRecord.get("service_id"); String systemId = csvRecord.get("system_id"); String patientId = csvRecord.get("patient_id"); UUID serviceUuid = UUID.fromString(serviceId); List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid); if (systemIds == null) { systemIds = new ArrayList<>(); serviceAndSystemIds.put(serviceUuid, systemIds); } systemIds.add(UUID.fromString(systemId)); patientIds.put(UUID.fromString(patientId), new Integer(count)); } csvParser.close(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ParserPool parser = new ParserPool(); Map<Integer, List<Object[]>> patientRows = new HashMap<>(); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); for (UUID serviceId: serviceAndSystemIds.keySet()) { //List<UUID> systemIds = serviceAndSystemIds.get(serviceId); Service service = serviceDalI.getById(serviceId); String serviceName = service.getName(); LOG.info("Doing service " + serviceId + " " + serviceName); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId); LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan"); int exchangeCount = 0; for 
(UUID exchangeId: exchangeIds) { exchangeCount ++; if (exchangeCount % 1000 == 0) { LOG.info("Done " + exchangeCount + " exchanges"); } List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { UUID patientId = exchangeBatch.getEdsPatientId(); if (patientId != null && !patientIds.containsKey(patientId)) { continue; } Integer patientIdInt = patientIds.get(patientId); //get encounters for exchange batch UUID batchId = exchangeBatch.getBatchId(); List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper resourceWrapper: resourceWrappers) { if (resourceWrapper.isDeleted()) { continue; } String resourceType = resourceWrapper.getResourceType(); if (!resourceType.equals(ResourceType.Encounter.toString())) { continue; } LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId()); String json = resourceWrapper.getResourceData(); Encounter fhirEncounter = (Encounter)parser.parse(json); Date date = null; if (fhirEncounter.hasPeriod()) { Period period = fhirEncounter.getPeriod(); if (period.hasStart()) { date = period.getStart(); } } String episodeId = null; if (fhirEncounter.hasEpisodeOfCare()) { Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference); EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirEpisode != null) { if (fhirEpisode.hasIdentifier()) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID); if (Strings.isNullOrEmpty(episodeId)) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID); } } } } String adtType = null; String adtCode = null; Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE); if (extension != null) { CodeableConcept codeableConcept = (CodeableConcept) extension.getValue(); Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE); if (hl7MessageTypeCoding != null) { adtType = hl7MessageTypeCoding.getDisplay(); adtCode = hl7MessageTypeCoding.getCode(); } } else { //for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body try { Exchange exchange = exchangeDalI.getExchange(exchangeId); String exchangeBody = exchange.getBody(); Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody); for (Bundle.BundleEntryComponent entry: bundle.getEntry()) { if (entry.getResource() != null && entry.getResource() instanceof MessageHeader) { MessageHeader header = (MessageHeader)entry.getResource(); if (header.hasEvent()) { Coding coding = header.getEvent(); adtType = coding.getDisplay(); adtCode = coding.getCode(); } } } } catch (Exception ex) { //if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them } } String cls = null; if (fhirEncounter.hasClass_()) { Encounter.EncounterClass encounterClass = fhirEncounter.getClass_(); if (encounterClass == Encounter.EncounterClass.OTHER && fhirEncounter.hasClass_Element() && fhirEncounter.getClass_Element().hasExtension()) { for (Extension classExtension: 
fhirEncounter.getClass_Element().getExtension()) { if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) { //not 100% of the type of the value, so just append to a String cls = "" + classExtension.getValue(); } } } if (Strings.isNullOrEmpty(cls)) { cls = encounterClass.toCode(); } } String type = null; if (fhirEncounter.hasType()) { //only seem to ever have one type CodeableConcept codeableConcept = fhirEncounter.getType().get(0); type = codeableConcept.getText(); } String status = null; if (fhirEncounter.hasStatus()) { Encounter.EncounterState encounterState = fhirEncounter.getStatus(); status = encounterState.toCode(); } String location = null; String locationType = null; if (fhirEncounter.hasLocation()) { //first location is always the current location Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0); if (encounterLocation.hasLocation()) { Reference locationReference = encounterLocation.getLocation(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference); Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirLocation != null) { if (fhirLocation.hasName()) { location = fhirLocation.getName(); } if (fhirLocation.hasType()) { CodeableConcept typeCodeableConcept = fhirLocation.getType(); if (typeCodeableConcept.hasCoding()) { Coding coding = typeCodeableConcept.getCoding().get(0); locationType = coding.getDisplay(); } } } } } String clinician = null; if (fhirEncounter.hasParticipant()) { //first participant seems to be the interesting one Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0); if (encounterParticipant.hasIndividual()) { Reference practitionerReference = encounterParticipant.getIndividual(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference); Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirPractitioner != null) { if (fhirPractitioner.hasName()) { HumanName name = fhirPractitioner.getName(); clinician = name.getText(); if (Strings.isNullOrEmpty(clinician)) { clinician = ""; for (StringType s: name.getPrefix()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getGiven()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getFamily()) { clinician += s.getValueNotNull(); clinician += " "; } clinician = clinician.trim(); } } } } } Object[] row = new Object[12]; row[0] = serviceName; row[1] = patientIdInt.toString(); row[2] = sdfOutput.format(date); row[3] = episodeId; row[4] = adtCode; row[5] = adtType; row[6] = cls; row[7] = type; row[8] = status; row[9] = location; row[10] = locationType; row[11] = clinician; List<Object[]> rows = patientRows.get(patientIdInt); if (rows == null) { rows = new ArrayList<>(); patientRows.put(patientIdInt, rows); } rows.add(row); } } } } String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"}; FileWriter fileWriter = new FileWriter(outputPath); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader(outputColumnHeaders) .withQuote('"'); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format); for (int i=0; i <= count; i++) { Integer 
patientIdInt = new Integer(i); List<Object[]> rows = patientRows.get(patientIdInt); if (rows != null) { for (Object[] row: rows) { csvPrinter.printRecord(row); } } } csvPrinter.close(); bufferedWriter.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath); }*/ /*private static void registerShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { LOG.info(""); try { Thread.sleep(5000); } catch (Throwable ex) { LOG.error("", ex); } LOG.info("Done"); } }); }*/ private static void findEmisStartDates(String path, String outputPath) { LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss"); Map<String, Date> startDates = new HashMap<>(); Map<String, String> servers = new HashMap<>(); Map<String, String> names = new HashMap<>(); Map<String, String> odsCodes = new HashMap<>(); Map<String, String> cdbNumbers = new HashMap<>(); Map<String, Set<String>> distinctPatients = new HashMap<>(); File root = new File(path); for (File sftpRoot: root.listFiles()) { LOG.info("Checking " + sftpRoot); Map<Date, File> extracts = new HashMap<>(); List<Date> extractDates = new ArrayList<>(); for (File extractRoot: sftpRoot.listFiles()) { Date d = sdf.parse(extractRoot.getName()); //LOG.info("" + extractRoot.getName() + " -> " + d); extracts.put(d, extractRoot); extractDates.add(d); } Collections.sort(extractDates); for (Date extractDate: extractDates) { File extractRoot = extracts.get(extractDate); LOG.info("Checking " + extractRoot); //read the sharing agreements file //e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv File sharingAgreementsFile = null; for (File f: extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("agreements_sharingorganisation") > -1 && name.endsWith(".csv")) { sharingAgreementsFile = f; break; } } if (sharingAgreementsFile == null) { LOG.info("Null agreements file for " + extractRoot); continue; } CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String activated = csvRecord.get("IsActivated"); String disabled = csvRecord.get("Disabled"); servers.put(orgGuid, sftpRoot.getName()); if (activated.equalsIgnoreCase("true")) { if (disabled.equalsIgnoreCase("false")) { Date d = sdf.parse(extractRoot.getName()); Date existingDate = startDates.get(orgGuid); if (existingDate == null) { startDates.put(orgGuid, d); } } else { if (startDates.containsKey(orgGuid)) { startDates.put(orgGuid, null); } } } } } finally { csvParser.close(); } //go through orgs file to get name, ods and cdb codes File orgsFile = null; for (File f: extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_organisation_") > -1 && name.endsWith(".csv")) { orgsFile = f; break; } } csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String name = csvRecord.get("OrganisationName"); String odsCode = 
csvRecord.get("ODSCode"); String cdb = csvRecord.get("CDB"); names.put(orgGuid, name); odsCodes.put(orgGuid, odsCode); cdbNumbers.put(orgGuid, cdb); } } finally { csvParser.close(); } //go through patients file to get count File patientFile = null; for (File f: extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_patient_") > -1 && name.endsWith(".csv")) { patientFile = f; break; } } csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String patientGuid = csvRecord.get("PatientGuid"); String deleted = csvRecord.get("Deleted"); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); if (distinctPatientSet == null) { distinctPatientSet = new HashSet<>(); distinctPatients.put(orgGuid, distinctPatientSet); } if (deleted.equalsIgnoreCase("true")) { distinctPatientSet.remove(patientGuid); } else { distinctPatientSet.add(patientGuid); } } } finally { csvParser.close(); } } } SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); StringBuilder sb = new StringBuilder(); sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients"); for (String orgGuid: startDates.keySet()) { Date startDate = startDates.get(orgGuid); String server = servers.get(orgGuid); String name = names.get(orgGuid); String odsCode = odsCodes.get(orgGuid); String cdbNumber = cdbNumbers.get(orgGuid); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); String startDateDesc = null; if (startDate != null) { startDateDesc = sdfOutput.format(startDate); } Long countDistinctPatients = null; if (distinctPatientSet != null) { countDistinctPatients = new Long(distinctPatientSet.size()); } sb.append("\n"); sb.append("\"" + name + "\""); sb.append(","); sb.append("\"" + odsCode + "\""); sb.append(","); sb.append("\"" + cdbNumber + "\""); sb.append(","); sb.append("\"" + orgGuid + "\""); sb.append(","); sb.append(startDateDesc); sb.append(","); sb.append("\"" + server + "\""); sb.append(","); sb.append(countDistinctPatients); } LOG.info(sb.toString()); FileUtils.writeStringToFile(new File(outputPath), sb.toString()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath); } private static void findEncounterTerms(String path, String outputPath) { LOG.info("Finding Encounter Terms from " + path); Map<String, Long> hmResults = new HashMap<>(); //source term, source term snomed ID, source term snomed term - count try { File root = new File(path); File[] files = root.listFiles(); for (File readerRoot: files) { //emis001 LOG.info("Finding terms in " + readerRoot); //first read in all the coding files to build up our map of codes Map<String, String> hmCodes = new HashMap<>(); for (File dateFolder: readerRoot.listFiles()) { LOG.info("Looking for codes in " + dateFolder); File f = findFile(dateFolder, "Coding_ClinicalCode"); if (f == null) { LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String codeId = csvRecord.get("CodeId"); String term = csvRecord.get("Term"); String snomed = 
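        //(each CodeId is cached below against its Snomed concept ID and term, so the consultation
        //source codes read in the second pass can be resolved to a coded meaning)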
csvRecord.get("SnomedCTConceptId"); hmCodes.put(codeId, snomed + ",\"" + term + "\""); } csvParser.close(); } SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); Date cutoff = dateFormat.parse("2017-01-01"); //now process the consultation files themselves for (File dateFolder: readerRoot.listFiles()) { LOG.info("Looking for consultations in " + dateFolder); File f = findFile(dateFolder, "CareRecord_Consultation"); if (f == null) { LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String term = csvRecord.get("ConsultationSourceTerm"); String codeId = csvRecord.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(term) && Strings.isNullOrEmpty(codeId)) { continue; } String date = csvRecord.get("EffectiveDate"); if (Strings.isNullOrEmpty(date)) { continue; } Date d = dateFormat.parse(date); if (d.before(cutoff)) { continue; } String line = "\"" + term + "\","; if (!Strings.isNullOrEmpty(codeId)) { String codeLookup = hmCodes.get(codeId); if (codeLookup == null) { LOG.error("Failed to find lookup for codeID " + codeId); continue; } line += codeLookup; } else { line += ","; } Long count = hmResults.get(line); if (count == null) { count = new Long(1); } else { count = new Long(count.longValue() + 1); } hmResults.put(line, count); } csvParser.close(); } } //save results to file StringBuilder output = new StringBuilder(); output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\""); output.append("\r\n"); for (String line: hmResults.keySet()) { Long count = hmResults.get(line); String combined = line + "," + count; output.append(combined); output.append("\r\n"); } LOG.info("FInished"); LOG.info(output.toString()); FileUtils.writeStringToFile(new File(outputPath), output.toString()); LOG.info("written output to " + outputPath); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished finding Encounter Terms from " + path); } private static File findFile(File root, String token) throws Exception { for (File f: root.listFiles()) { String s = f.getName(); if (s.indexOf(token) > -1) { return f; } } return null; } /*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) { LOG.info("Starting Populating Protocol Queue for " + serviceIdStr); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); if (serviceIdStr.equalsIgnoreCase("All")) { serviceIdStr = null; } try { List<Service> services = new ArrayList<>(); if (Strings.isNullOrEmpty(serviceIdStr)) { services = serviceRepository.getAll(); } else { UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); services.add(service); } for (Service service: services) { List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId()); LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName()); if (startingExchangeId != null) { UUID startingExchangeUuid = UUID.fromString(startingExchangeId); if (exchangeIds.contains(startingExchangeUuid)) { //if in the list, remove everything up to and including the starting exchange int index = exchangeIds.indexOf(startingExchangeUuid); LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " 
so removing up to this point"); for (int i=index; i>=0; i--) { exchangeIds.remove(i); } startingExchangeId = null; } else { //if not in the list, skip all these exchanges LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping"); continue; } } QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true); } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Populating Protocol Queue for " + serviceIdStr); }*/ /*private static void findDeletedOrgs() { LOG.info("Starting finding deleted orgs"); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); List<Service> services = new ArrayList<>(); try { for (Service service: serviceRepository.getAll()) { services.add(service); } } catch (Exception ex) { LOG.error("", ex); } services.sort((o1, o2) -> { String name1 = o1.getName(); String name2 = o2.getName(); return name1.compareToIgnoreCase(name2); }); for (Service service: services) { try { UUID serviceUuid = service.getId(); List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date()); LOG.info("Service: " + service.getName() + " " + service.getLocalId()); if (exchangeByServices.isEmpty()) { LOG.info(" no exchange found!"); continue; } Exchange exchangeByService = exchangeByServices.get(0); UUID exchangeId = exchangeByService.getId(); Exchange exchange = auditRepository.getExchange(exchangeId); Map<String, String> headers = exchange.getHeaders(); String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid); UUID systemUuid = UUID.fromString(systemUuidStr); int batches = countBatches(exchangeId, serviceUuid, systemUuid); LOG.info(" Most recent exchange had " + batches + " batches"); if (batches > 1 && batches < 2000) { continue; } //go back until we find the FIRST exchange where it broke exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date()); for (int i=0; i<exchangeByServices.size(); i++) { exchangeByService = exchangeByServices.get(i); exchangeId = exchangeByService.getId(); batches = countBatches(exchangeId, serviceUuid, systemUuid); exchange = auditRepository.getExchange(exchangeId); Date timestamp = exchange.getTimestamp(); if (batches < 1 || batches > 2000) { LOG.info(" " + timestamp + " had " + batches); } if (batches > 1 && batches < 2000) { LOG.info(" " + timestamp + " had " + batches); break; } } } catch (Exception ex) { LOG.error("", ex); } } LOG.info("Finished finding deleted orgs"); }*/ private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception { int batches = 0; ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { if (audit.getNumberBatchesCreated() != null) { batches += audit.getNumberBatchesCreated(); } } return batches; } /*private static void fixExchanges(UUID justThisService) { LOG.info("Fixing exchanges"); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId : exchangeIds) { 
Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } boolean changed = false; String body = exchange.getBody(); String[] files = body.split("\n"); if (files.length == 0) { continue; } for (int i=0; i<files.length; i++) { String original = files[i]; //remove /r characters String trimmed = original.trim(); //add the new prefix if (!trimmed.startsWith("sftpreader/EMIS001/")) { trimmed = "sftpreader/EMIS001/" + trimmed; } if (!original.equals(trimmed)) { files[i] = trimmed; changed = true; } } if (changed) { LOG.info("Fixed exchange " + exchangeId); LOG.info(body); body = String.join("\n", files); exchange.setBody(body); AuditWriter.writeExchange(exchange); } } } LOG.info("Fixed exchanges"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void deleteDataForService(UUID serviceId) { Service dbService = new ServiceRepository().getById(serviceId); //the delete will take some time, so do the delete in a separate thread LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId()); FhirDeletionService deletor = new FhirDeletionService(dbService); try { deletor.deleteData(); LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId()); } catch (Exception ex) { LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex); } }*/ /*private static void fixProblems(UUID serviceId, String sharedStoragePath, boolean testMode) { LOG.info("Fixing problems for service " + serviceId); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); List<ExchangeByService> exchangeByServiceList = auditRepository.getExchangesByService(serviceId, Integer.MAX_VALUE); //go backwards as the most recent is first for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); UUID exchangeId = exchangeByService.getExchangeId(); LOG.info("Doing exchange " + exchangeId); EmisCsvHelper helper = null; try { Exchange exchange = AuditWriter.readExchange(exchangeId); String exchangeBody = exchange.getBody(); String[] files = exchangeBody.split(java.lang.System.lineSeparator()); File orgDirectory = validateAndFindCommonDirectory(sharedStoragePath, files); Map<Class, AbstractCsvParser> allParsers = new HashMap<>(); String properVersion = null; String[] versions = new String[]{EmisCsvToFhirTransformer.VERSION_5_0, EmisCsvToFhirTransformer.VERSION_5_1, EmisCsvToFhirTransformer.VERSION_5_3, EmisCsvToFhirTransformer.VERSION_5_4}; for (String version: versions) { try { List<AbstractCsvParser> parsers = new ArrayList<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(Observation.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(DrugRecord.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(IssueRecord.class, orgDirectory, version, true, parsers); for (AbstractCsvParser parser: parsers) { Class cls = parser.getClass(); allParsers.put(cls, parser); } properVersion = version; } catch (Exception ex) { //ignore } } if (allParsers.isEmpty()) { throw new Exception("Failed to open parsers for exchange " + exchangeId + " in folder " + orgDirectory); } UUID systemId = 
exchange.getHeaderAsUuid(HeaderKeys.SenderSystemUuid); //FhirResourceFiler dummyFiler = new FhirResourceFiler(exchangeId, serviceId, systemId, null, null, 10); if (helper == null) { helper = new EmisCsvHelper(findDataSharingAgreementGuid(new ArrayList<>(allParsers.values()))); } ObservationPreTransformer.transform(properVersion, allParsers, null, helper); IssueRecordPreTransformer.transform(properVersion, allParsers, null, helper); DrugRecordPreTransformer.transform(properVersion, allParsers, null, helper); Map<String, List<String>> problemChildren = helper.getProblemChildMap(); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (Map.Entry<String, List<String>> entry : problemChildren.entrySet()) { String patientLocallyUniqueId = entry.getKey().split(":")[0]; UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientLocallyUniqueId); if (edsPatientId == null) { throw new Exception("Failed to find edsPatientId for local Patient ID " + patientLocallyUniqueId + " in exchange " + exchangeId); } //find the batch ID for our patient UUID batchId = null; for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null && exchangeBatch.getEdsPatientId().equals(edsPatientId)) { batchId = exchangeBatch.getBatchId(); break; } } if (batchId == null) { throw new Exception("Failed to find batch ID for eds Patient ID " + edsPatientId + " in exchange " + exchangeId); } //find the EDS ID for our problem UUID edsProblemId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Condition, entry.getKey()); if (edsProblemId == null) { LOG.warn("No edsProblemId found for local ID " + entry.getKey() + " - assume bad data referring to non-existing problem?"); //throw new Exception("Failed to find edsProblemId for local Patient ID " + problemLocallyUniqueId + " in exchange " + exchangeId); } //convert our child IDs to EDS references List<Reference> references = new ArrayList<>(); HashSet<String> contentsSet = new HashSet<>(); contentsSet.addAll(entry.getValue()); for (String referenceValue : contentsSet) { Reference reference = ReferenceHelper.createReference(referenceValue); ReferenceComponents components = ReferenceHelper.getReferenceComponents(reference); String locallyUniqueId = components.getId(); ResourceType resourceType = components.getResourceType(); UUID edsResourceId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); Reference globallyUniqueReference = ReferenceHelper.createReference(resourceType, edsResourceId.toString()); references.add(globallyUniqueReference); } //find the resource for the problem itself ResourceByExchangeBatch problemResourceByExchangeBatch = null; List<ResourceByExchangeBatch> resources = resourceRepository.getResourcesForBatch(batchId, ResourceType.Condition.toString()); for (ResourceByExchangeBatch resourceByExchangeBatch: resources) { if (resourceByExchangeBatch.getResourceId().equals(edsProblemId)) { problemResourceByExchangeBatch = resourceByExchangeBatch; break; } } if (problemResourceByExchangeBatch == null) { throw new Exception("Problem not found for edsProblemId " + edsProblemId + " for exchange " + exchangeId); } if (problemResourceByExchangeBatch.getIsDeleted()) { LOG.warn("Problem " + edsProblemId + " is deleted, so not adding to it for exchange " + exchangeId); continue; } String json = problemResourceByExchangeBatch.getResourceData(); Condition fhirProblem = (Condition)PARSER_POOL.parse(json); //update the 
problems if (fhirProblem.hasContained()) { if (fhirProblem.getContained().size() > 1) { throw new Exception("Problem " + edsProblemId + " is has " + fhirProblem.getContained().size() + " contained resources for exchange " + exchangeId); } fhirProblem.getContained().clear(); } List_ list = new List_(); list.setId("Items"); fhirProblem.getContained().add(list); Extension extension = ExtensionConverter.findExtension(fhirProblem, FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE); if (extension == null) { Reference listReference = ReferenceHelper.createInternalReference("Items"); fhirProblem.addExtension(ExtensionConverter.createExtension(FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE, listReference)); } for (Reference reference : references) { list.addEntry().setItem(reference); } String newJson = FhirSerializationHelper.serializeResource(fhirProblem); if (newJson.equals(json)) { LOG.warn("Skipping edsProblemId " + edsProblemId + " as JSON hasn't changed"); continue; } problemResourceByExchangeBatch.setResourceData(newJson); String resourceType = problemResourceByExchangeBatch.getResourceType(); UUID versionUuid = problemResourceByExchangeBatch.getVersion(); ResourceHistory problemResourceHistory = resourceRepository.getResourceHistoryByKey(edsProblemId, resourceType, versionUuid); problemResourceHistory.setResourceData(newJson); problemResourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); ResourceByService problemResourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType, edsProblemId); if (problemResourceByService.getResourceData() == null) { problemResourceByService = null; LOG.warn("Not updating edsProblemId " + edsProblemId + " for exchange " + exchangeId + " as it's been subsequently delrted"); } else { problemResourceByService.setResourceData(newJson); } //save back to THREE tables if (!testMode) { resourceRepository.save(problemResourceByExchangeBatch); resourceRepository.save(problemResourceHistory); if (problemResourceByService != null) { resourceRepository.save(problemResourceByService); } LOG.info("Fixed edsProblemId " + edsProblemId + " for exchange Id " + exchangeId); } else { LOG.info("Would change edsProblemId " + edsProblemId + " to new JSON"); LOG.info(newJson); } } } catch (Exception ex) { LOG.error("Failed on exchange " + exchangeId, ex); break; } } LOG.info("Finished fixing problems for service " + serviceId); } private static String findDataSharingAgreementGuid(List<AbstractCsvParser> parsers) throws Exception { //we need a file name to work out the data sharing agreement ID, so just the first file we can find File f = parsers .iterator() .next() .getFile(); String name = Files.getNameWithoutExtension(f.getName()); String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + f.getName()); } return toks[4]; } private static void closeParsers(Collection<AbstractCsvParser> parsers) { for (AbstractCsvParser parser : parsers) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } private static File validateAndFindCommonDirectory(String sharedStoragePath, String[] files) throws Exception { String organisationDir = null; for (String file: files) { File f = new File(sharedStoragePath, file); if (!f.exists()) { LOG.error("Failed to find file {} in shared storage {}", file, sharedStoragePath); throw new FileNotFoundException("" + f + " doesn't exist"); } //LOG.info("Successfully 
found file {} in shared storage {}", file, sharedStoragePath); try { File orgDir = f.getParentFile(); if (organisationDir == null) { organisationDir = orgDir.getAbsolutePath(); } else { if (!organisationDir.equalsIgnoreCase(orgDir.getAbsolutePath())) { throw new Exception(); } } } catch (Exception ex) { throw new FileNotFoundException("" + f + " isn't in the expected directory structure within " + organisationDir); } } return new File(organisationDir); }*/ /*private static void testLogging() { while (true) { System.out.println("Checking logging at " + System.currentTimeMillis()); try { Thread.sleep(4000); } catch (Exception e) { e.printStackTrace(); } LOG.trace("trace logging"); LOG.debug("debug logging"); LOG.info("info logging"); LOG.warn("warn logging"); LOG.error("error logging"); } } */ /*private static void fixExchangeProtocols() { LOG.info("Fixing exchange protocols"); AuditRepository auditRepository = new AuditRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); LOG.info("Processing exchange " + exchangeId); Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); List<String> newIds = new ArrayList<>(); String protocolJson = headers.get(HeaderKeys.Protocols); if (!headers.containsKey(HeaderKeys.Protocols)) { try { List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr); // Get protocols where service is publisher newIds = libraryItemList.stream() .filter( libraryItem -> libraryItem.getProtocol().getServiceContract().stream() .anyMatch(sc -> sc.getType().equals(ServiceContractType.PUBLISHER) && sc.getService().getUuid().equals(serviceIdStr))) .map(t -> t.getUuid().toString()) .collect(Collectors.toList()); } catch (Exception e) { LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e); continue; } } else { try { JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson); for (int i = 0; i < node.size(); i++) { JsonNode libraryItemNode = node.get(i); JsonNode idNode = libraryItemNode.get("uuid"); String id = idNode.asText(); newIds.add(id); } } catch (Exception e) { LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e); continue; } } try { if (newIds.isEmpty()) { headers.remove(HeaderKeys.Protocols); } else { String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray()); headers.put(HeaderKeys.Protocols, protocolsJson); } } catch (JsonProcessingException e) { LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e); continue; } try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); } catch (JsonProcessingException e) { LOG.error("Failed to 
write exchange headers to Json for exchange " + exchange.getExchangeId(), e); continue; } auditRepository.save(exchange); } LOG.info("Finished fixing exchange protocols"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } if (headers.containsKey(HeaderKeys.SenderLocalIdentifier) && headers.containsKey(HeaderKeys.SenderOrganisationUuid)) { continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); Map<UUID, String> orgMap = service.getOrganisations(); if (orgMap.size() != 1) { LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId()); continue; } UUID orgId = orgMap .keySet() .stream() .collect(StreamExtension.firstOrNullCollector()); Organisation organisation = organisationRepository.getById(orgId); String odsCode = organisation.getNationalId(); headers.put(HeaderKeys.SenderLocalIdentifier, odsCode); headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString()); try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Creating exchange " + exchange.getExchangeId()); } LOG.info("Finished fixing exchange headers"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); LibraryRepository libraryRepository = new LibraryRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } boolean changed = false; UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); try { List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new 
TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint : endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid())) { if (!headers.containsKey(HeaderKeys.SourceSystem)) { headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat()); changed = true; } if (!headers.containsKey(HeaderKeys.SystemVersion)) { headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion()); changed = true; } if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) { headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString()); changed = true; } } } } } catch (Exception e) { LOG.error("Failed to find endpoint details for " + exchange.getExchangeId()); continue; } if (changed) { try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Fixed exchange " + exchange.getExchangeId()); } } LOG.info("Finished fixing exchange headers"); }*/ /*private static void testConnection(String configName) { try { JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise"); String driverClass = config.get("driverClass").asText(); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName(driverClass); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void testConnection() { try { JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise"); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName("org.postgresql.Driver"); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception { LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom); LOG.info("Testing database connection"); testConnection(configName); Service service = new ServiceRepository().getById(serviceId); List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet()); UUID orgId = orgIds.get(0); List<ExchangeByService> exchangeByServiceList = new 
AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE); for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); //for (ExchangeByService exchangeByService: exchangeByServiceList) { UUID exchangeId = exchangeByService.getExchangeId(); if (exchangeIdStartFrom != null) { if (!exchangeIdStartFrom.equals(exchangeId)) { continue; } else { //once we have a match, set to null so we don't skip any subsequent ones exchangeIdStartFrom = null; } } Exchange exchange = AuditWriter.readExchange(exchangeId); String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid); UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr); //this one had 90,000 batches and doesn't need doing again *//*if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) { LOG.info("Skipping exchange " + exchangeId); continue; }*//* List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId); LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches"); for (int j=0; j<exchangeBatches.size(); j++) { ExchangeBatch exchangeBatch = exchangeBatches.get(j); UUID batchId = exchangeBatch.getBatchId(); if (batchIdStartFrom != null) { if (!batchIdStartFrom.equals(batchId)) { continue; } else { batchIdStartFrom = null; } } LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size()); try { String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null); if (!Strings.isNullOrEmpty(outbound)) { EnterpriseFiler.file(outbound, configName); } } catch (Exception ex) { throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex); } } } }*/ /*private static void fixMissingExchanges() { LOG.info("Fixing missing exchanges"); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;"); stmt.setFetchSize(100); Set<UUID> exchangeIdsDone = new HashSet<>(); AuditRepository auditRepository = new AuditRepository(); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); UUID batchId = row.get(1, UUID.class); Date date = row.getTimestamp(2); //LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date); if (exchangeIdsDone.contains(exchangeId)) { continue; } if (auditRepository.getExchange(exchangeId) != null) { continue; } UUID serviceId = findServiceId(batchId, session); if (serviceId == null) { continue; } Exchange exchange = new Exchange(); ExchangeByService exchangeByService = new ExchangeByService(); ExchangeEvent exchangeEvent = new ExchangeEvent(); Map<String, String> headers = new HashMap<>(); headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString()); String headersJson = null; try { headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setBody("Body not available, as exchange re-created"); exchange.setExchangeId(exchangeId); exchange.setHeaders(headersJson); exchange.setTimestamp(date); exchangeByService.setExchangeId(exchangeId); 
exchangeByService.setServiceId(serviceId); exchangeByService.setTimestamp(date); exchangeEvent.setEventDesc("Created_By_Conversion"); exchangeEvent.setExchangeId(exchangeId); exchangeEvent.setTimestamp(new Date()); auditRepository.save(exchange); auditRepository.save(exchangeEvent); auditRepository.save(exchangeByService); exchangeIdsDone.add(exchangeId); LOG.info("Creating exchange " + exchangeId); } LOG.info("Finished exchange fix"); } private static UUID findServiceId(UUID batchId, Session session) { Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;"); ResultSet rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId); return null; } Row row = rs.one(); String resourceType = row.getString(0); UUID resourceId = row.get(1, UUID.class); stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;"); rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId); return null; } row = rs.one(); UUID serviceId = row.get(0, UUID.class); return serviceId; }*/ /*private static void fixExchangeEvents() { List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents(); for (ExchangeEvent event: events) { if (event.getEventDesc() != null) { continue; } String eventDesc = ""; int eventType = event.getEvent().intValue(); switch (eventType) { case 1: eventDesc = "Receive"; break; case 2: eventDesc = "Validate"; break; case 3: eventDesc = "Transform_Start"; break; case 4: eventDesc = "Transform_End"; break; case 5: eventDesc = "Send"; break; default: eventDesc = "??? 
" + eventType; } event.setEventDesc(eventDesc); new AuditRepository().save(null, event); } }*/ /*private static void fixExchanges() { AuditRepository auditRepository = new AuditRepository(); Map<UUID, Set<UUID>> existingOnes = new HashMap(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); List<Exchange> exchanges = auditRepository.getAllExchanges(); for (Exchange exchange: exchanges) { UUID exchangeUuid = exchange.getExchangeId(); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson); continue; } *//*String serviceId = headers.get(HeaderKeys.SenderServiceUuid); if (serviceId == null) { LOG.warn("No service ID found for exchange " + exchange.getExchangeId()); continue; } UUID serviceUuid = UUID.fromString(serviceId); Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid); if (exchangeIdsDone == null) { exchangeIdsDone = new HashSet<>(); List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE); for (ExchangeByService exchangeByService: exchangeByServices) { exchangeIdsDone.add(exchangeByService.getExchangeId()); } existingOnes.put(serviceUuid, exchangeIdsDone); } //create the exchange by service entity if (!exchangeIdsDone.contains(exchangeUuid)) { Date timestamp = exchange.getTimestamp(); ExchangeByService newOne = new ExchangeByService(); newOne.setExchangeId(exchangeUuid); newOne.setServiceId(serviceUuid); newOne.setTimestamp(timestamp); auditRepository.save(newOne); }*//* try { headers.remove(HeaderKeys.BatchIdsJson); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } if (!headers.containsKey(HeaderKeys.BatchIdsJson)) { //fix the batch IDs not being in the exchange List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid); if (!batches.isEmpty()) { List<UUID> batchUuids = batches .stream() .map(t -> t.getBatchId()) .collect(Collectors.toList()); try { String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray()); headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange, null); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } } //} } }*/ /*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException { List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); LibraryRepository libraryRepository = new LibraryRepository(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = 
QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid()) && technicalInterface.getMessageFormat().equalsIgnoreCase(software) && technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) { return endpointSystemId; } } } } catch (Exception e) { throw new PipelineException("Failed to process endpoints from service " + service.getId()); } return null; } */ /*private static void addSystemIdToExchangeHeaders() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) { LOG.info("Skipping exchange " + exchangeId + " as no service UUID"); continue; } if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " as already got system UUID"); continue; } try { //work out service ID String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); UUID serviceId = UUID.fromString(serviceIdStr); String software = headers.get(HeaderKeys.SourceSystem); String version = headers.get(HeaderKeys.SystemVersion); Service service = serviceRepository.getById(serviceId); UUID systemUuid = findSystemId(service, software, version); headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString()); //work out protocol IDs try { String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr); headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson); } catch (Exception ex) { LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage()); } //save to DB headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); auditRepository.save(exchange); } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); }*/ /*private static void populateExchangeBatchPatients() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); //ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); 
Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid)) || Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " because no service or system in header"); continue; } try { UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid)); UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid)); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch : exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } UUID batchId = exchangeBatch.getBatchId(); List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString()); if (resourceWrappers.isEmpty()) { continue; } List<UUID> patientIds = new ArrayList<>(); for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) { UUID patientId = resourceWrapper.getResourceId(); if (resourceWrapper.getIsDeleted()) { deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId); } if (!patientIds.contains(patientId)) { patientIds.add(patientId); } } if (patientIds.size() != 1) { LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs"); continue; } UUID patientId = patientIds.get(0); exchangeBatch.setEdsPatientId(patientId); exchangeBatchRepository.save(exchangeBatch); } } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); } private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception { FhirStorageService storageService = new FhirStorageService(serviceId, systemId); ResourceRepository resourceRepository = new ResourceRepository(); List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId); for (ResourceByPatient resourceWrapper: resourceWrappers) { String json = resourceWrapper.getResourceData(); Resource resource = new JsonParser().parse(json); storageService.exchangeBatchDelete(exchangeId, batchId, resource); } }*/ /*private static void convertPatientSearch() { LOG.info("Converting Patient Search"); ResourceRepository resourceRepository = new ResourceRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); LOG.info("Doing service " + service.getName()); for (UUID systemId : findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if 
(Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData()); String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient()); ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId)); if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) { continue; } Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData()); PatientSearchHelper.update(serviceId, systemId, patient); PatientSearchHelper.update(serviceId, systemId, episodeOfCare); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Search"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static List<UUID> findSystemIds(Service service) throws Exception { List<UUID> ret = new ArrayList<>(); List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); ret.add(endpointSystemId); } } catch (Exception e) { throw new Exception("Failed to process endpoints from service " + service.getId()); } return ret; } /*private static void convertPatientLink() { LOG.info("Converting Patient Link"); ResourceRepository resourceRepository = new ResourceRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); LOG.info("Doing service " + service.getName()); for (UUID systemId : findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData()); PatientLinkHelper.updatePersonId(patient); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Link"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); 
Map<String, ResourceHistory> resourcesFixed = new HashMap<>(); Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>(); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); ProblemPreTransformer.transform(version, parsers, filer, helper); ObservationPreTransformer.transform(version, parsers, filer, helper); DrugRecordPreTransformer.transform(version, parsers, filer, helper); IssueRecordPreTransformer.transform(version, parsers, filer, helper); DiaryPreTransformer.transform(version, parsers, filer, helper); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() && !patientParser.getDeleted()) { PatientTransformer.createResource(patientParser, filer, helper, version); } } patientParser.close(); 
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { ConsultationTransformer.createResource(consultationParser, filer, helper, version); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { ObservationTransformer.createResource(observationParser, filer, helper, version); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { DiaryTransformer.createResource(diaryParser, filer, helper, version); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version); } } issueRecordParser.close(); filer.waitToFinish(); //just to close the thread pool, even though it's not been used List<Resource> resources = filer.getNewResources(); for (Resource resource: resources) { String patientId = IdHelper.getPatientId(resource); UUID edsPatientId = UUID.fromString(patientId); ResourceType resourceType = resource.getResourceType(); UUID resourceId = UUID.fromString(resource.getId()); boolean foundResourceInDbBatch = false; List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds != null) { for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; } foundResourceInDbBatch = true; for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (!Strings.isNullOrEmpty(json)) { LOG.warn("JSON already in resource " + resourceType + " " + resourceId); } else { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); 
resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid); } resourceHistory.setIsDeleted(false); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceHistory.setSchemaVersion("0.1"); resourceRepository.save(resourceByExchangeBatch); resourceRepository.save(resourceHistory); batchIdsToPutInProtocolQueue.add(batchId); String key = resourceType.toString() + ":" + resourceId; resourcesFixed.put(key, resourceHistory); } //if a patient became confidential, we will have deleted all resources for that //patient, so we need to undo that too //to undelete WHOLE patient record //1. if THIS resource is a patient //2. get all other deletes from the same exchange batch //3. delete those from resource_by_exchange_batch (the deleted ones only) //4. delete same ones from resource_history //5. retrieve most recent resource_history //6. if not deleted, add to resources fixed if (resourceType == ResourceType.Patient) { List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId); LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId); for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) { if (!resourceInSameBatch.getIsDeleted()) { continue; } //patient and episode resources will be restored by the above stuff, so don't try //to do it again if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString()) || resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion()); mapperResourceByExchangeBatch.delete(resourceInSameBatch); mapperResourceHistory.delete(deletedResourceHistory); batchIdsToPutInProtocolQueue.add(batchId); //check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId()); if (mostRecentDeletedResourceHistory != null && !mostRecentDeletedResourceHistory.getIsDeleted()) { String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId(); resourcesFixed.put(key2, mostRecentDeletedResourceHistory); } } } } } } //if we didn't find records in the DB to update, then if (!foundResourceInDbBatch) { //we can't generate a back-dated time UUID, but we need one so the resource_history //table is in order. 
To get a suitable time UUID, we just pull out the first exchange batch for our exchange, //and the batch ID is actually a time UUID that was allocated around the right time ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId); //if there was no batch for the exchange, then the exchange wasn't processed at all. So skip this exchange //and we'll pick up the same patient data in a following exchange if (firstBatch == null) { continue; } UUID versionUuid = firstBatch.getBatchId(); //find suitable batch ID UUID batchId = null; if (batchIds != null && batchIds.size() > 0) { batchId = batchIds.get(batchIds.size()-1); } else { //create new batch ID if not found ExchangeBatch exchangeBatch = new ExchangeBatch(); exchangeBatch.setBatchId(UUIDs.timeBased()); exchangeBatch.setExchangeId(exchangeId); exchangeBatch.setInsertedAt(new Date()); exchangeBatch.setEdsPatientId(edsPatientId); exchangeBatchRepository.save(exchangeBatch); batchId = exchangeBatch.getBatchId(); //add to map for next resource if (batchIds == null) { batchIds = new ArrayList<>(); } batchIds.add(batchId); batchesPerPatient.put(edsPatientId, batchIds); } String json = parserPool.composeString(resource); ResourceHistory resourceHistory = new ResourceHistory(); resourceHistory.setResourceId(resourceId); resourceHistory.setResourceType(resourceType.toString()); resourceHistory.setVersion(versionUuid); resourceHistory.setCreatedAt(new Date()); resourceHistory.setServiceId(serviceId); resourceHistory.setSystemId(systemId); resourceHistory.setIsDeleted(false); resourceHistory.setSchemaVersion("0.1"); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch(); resourceByExchangeBatch.setBatchId(batchId); resourceByExchangeBatch.setExchangeId(exchangeId); resourceByExchangeBatch.setResourceType(resourceType.toString()); resourceByExchangeBatch.setResourceId(resourceId); resourceByExchangeBatch.setVersion(versionUuid); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); resourceByExchangeBatch.setResourceData(json); resourceRepository.save(resourceHistory); resourceRepository.save(resourceByExchangeBatch); batchIdsToPutInProtocolQueue.add(batchId); } } if (!batchIdsToPutInProtocolQueue.isEmpty()) { exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue); } } //update the resource_by_service table (and the resource_by_patient view) for (ResourceHistory resourceHistory: resourcesFixed.values()) { UUID latestVersionUpdatedUuid = resourceHistory.getVersion(); ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId()); UUID latestVersionUuid = latestVersion.getVersion(); //if there have been subsequent updates to the resource, then skip it if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) { continue; } Resource resource = parserPool.parse(resourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment)metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(resourceHistory.getServiceId()); resourceByService.setSystemId(resourceHistory.getSystemId()); resourceByService.setResourceType(resourceHistory.getResourceType()); resourceByService.setResourceId(resourceHistory.getResourceId()); 
resourceByService.setCurrentVersion(resourceHistory.getVersion()); resourceByService.setUpdatedAt(resourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); resourceByService.setResourceData(resourceHistory.getResourceData()); resourceRepository.save(resourceByService); //call out to our patient search and person matching services if (resource instanceof Patient) { PatientLinkHelper.updatePersonId((Patient)resource); PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource); } else if (resource instanceof EpisodeOfCare) { PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource); } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Fixing Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) { LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if 
(systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers); //find any deleted patients List<UUID> deletedPatientUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getDeleted()) { //find the EDS patient ID for this local guid String patientGuid = patientParser.getPatientGuid(); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } deletedPatientUuids.add(edsPatientId); } } patientParser.close(); //go through the appts file to find properly deleted appt GUIDS List<UUID> deletedApptUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class); while (apptParser.nextRecord()) { if (apptParser.getDeleted()) { String patientGuid = apptParser.getPatientGuid(); String slotGuid = apptParser.getSlotGuid(); if (!Strings.isNullOrEmpty(patientGuid)) { String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid); UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId); deletedApptUuids.add(edsApptId); } } } apptParser.close(); for (UUID edsPatientId : deletedPatientUuids) { List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; } for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString()); for (ResourceByExchangeBatch apptWrapper : apptWrappers) { //ignore non-deleted appts if (!apptWrapper.getIsDeleted()) { continue; } //if the appt was deleted legitamately, then skip it UUID apptId = apptWrapper.getResourceId(); if (deletedApptUuids.contains(apptId)) { continue; } 
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion()); if (saveChanges) { mapperResourceByExchangeBatch.delete(apptWrapper); mapperResourceHistory.delete(deletedResourceHistory); } LOG.info("Un-deleted " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId); //now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId()); if (mostRecentResourceHistory != null && !mostRecentResourceHistory.getIsDeleted()) { Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment) metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(mostRecentResourceHistory.getServiceId()); resourceByService.setSystemId(mostRecentResourceHistory.getSystemId()); resourceByService.setResourceType(mostRecentResourceHistory.getResourceType()); resourceByService.setResourceId(mostRecentResourceHistory.getResourceId()); resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion()); resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); resourceByService.setResourceData(mostRecentResourceHistory.getResourceData()); if (saveChanges) { resourceRepository.save(resourceByService); } LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table"); } } } } } } LOG.info("Finished Deleted Appointments Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void fixSlotReferencesForPublisher(String publisher) { try { ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> services = dal.getAll(); for (Service service: services) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { fixSlotReferences(service.getId()); } } } catch (Exception ex) { LOG.error("", ex); } } private static void fixSlotReferences(UUID serviceId) { LOG.info("Fixing Slot References in Appointments for " + serviceId); try { //get patient IDs from patient search List<UUID> patientIds = new ArrayList<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); String sql = "SELECT eds_id FROM publisher_transform_02.resource_id_map WHERE service_id = '" + serviceId + "'AND resource_type = '" + ResourceType.Patient + "';"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); connection.close(); /* EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = 
connection.createStatement(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId.toString() + "'"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); connection.close();*/ LOG.debug("Found " + patientIds.size() + " patients"); int done = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true); //for each patient for (UUID patientUuid: patientIds) { //LOG.debug("Checking patient " + patientUuid); //get all appointment resources List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString()); for (ResourceWrapper apptWrapper: appointmentWrappers) { //LOG.debug("Checking appointment " + apptWrapper.getResourceId()); List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId()); //the above returns most recent first, but we want to do them in order historyWrappers = Lists.reverse(historyWrappers); for (ResourceWrapper historyWrapper : historyWrappers) { if (historyWrapper.isDeleted()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " is deleted"); continue; } String json = historyWrapper.getResourceData(); Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json); if (!appt.hasSlot()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has no slot"); continue; } if (appt.getSlot().size() != 1) { throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs"); } Reference slotRef = appt.getSlot().get(0); //test if slot reference exists Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef); String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef); if (slotSourceId.indexOf(":") > -1) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has a valid slot"); continue; } //if not, correct slot reference Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId()); Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference); String sourceId = ReferenceHelper.getReferenceId(apptLocalReference); Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId); Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper); String slotEdsReferenceValue = slotEdsReference.getReference(); String oldSlotRefValue = slotRef.getReference(); slotRef.setReference(slotEdsReferenceValue); //LOG.debug("Appointment " + historyWrapper.getResourceId() + " slot ref changed from " + oldSlotRefValue + " to " + slotEdsReferenceValue); //save appointment json = FhirSerializationHelper.serializeResource(appt); historyWrapper.setResourceData(json); saveResourceWrapper(serviceId, historyWrapper); fixed++; } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); } } LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); LOG.info("Finished Fixing Slot References in Appointments for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } /*private static void fixReviews(String 
sharedStoragePath, UUID justThisService) { LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, Long> problemCodes = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (problemParser.nextRecord()) { String patientGuid = problemParser.getPatientGuid(); String observationGuid = problemParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (!problemCodes.containsKey(key)) { problemCodes.put(key, null); } } problemParser.close(); while (observationParser.nextRecord()) { String patientGuid = observationParser.getPatientGuid(); String observationGuid = observationParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (problemCodes.containsKey(key)) { Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } problemCodes.put(key, codeId); } } observationParser.close(); LOG.info("Found " + problemCodes.size() + " problem codes so far"); String dataSharingAgreementId = 
EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); while (observationParser.nextRecord()) { String problemGuid = observationParser.getProblemGuid(); if (!Strings.isNullOrEmpty(problemGuid)) { String patientGuid = observationParser.getPatientGuid(); Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } String key = patientGuid + ":" + problemGuid; Long problemCodeId = problemCodes.get(key); if (problemCodeId == null || problemCodeId.longValue() != codeId.longValue()) { continue; } //if here, our code is the same as the problem, so it's a review String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid(); ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper); for (UUID systemId: systemIds) { UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); if (edsObservationId == null) { //try observations as diagnostic reports, because it could be one of those instead if (resourceType == ResourceType.Observation) { resourceType = ResourceType.DiagnosticReport; edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); } if (edsObservationId == null) { throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId); } } List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; //throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId); } for (UUID batchId: batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; //throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId); } for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (Strings.isNullOrEmpty(json)) { throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId); } Resource resource = parserPool.parse(json); if (addReviewExtension((DomainResource)resource)) { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId); resourceRepository.save(resourceByExchangeBatch); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid); } 
resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceRepository.save(resourceHistory); ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId); if (resourceByService != null) { UUID serviceVersionUuid = resourceByService.getCurrentVersion(); if (serviceVersionUuid.equals(versionUuid)) { resourceByService.setResourceData(json); resourceRepository.save(resourceByService); } } } else { LOG.info("" + resourceType + " " + edsObservationId + " already has extension"); } } } } //1. find out resource type originall saved from //2. retrieve from resource_by_exchange_batch //3. update resource in resource_by_exchange_batch //4. retrieve from resource_history //5. update resource_history //6. retrieve record from resource_by_service //7. if resource_by_service version UUID matches the resource_history updated, then update that too } } observationParser.close(); } } LOG.info("Finished Fixing Reviews"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static boolean addReviewExtension(DomainResource resource) { if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) { return false; } Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true)); resource.addExtension(extension); return true; }*/ /*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } //once we match the servce, set this to null to do all other services justThisService = null; LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<String> interestingPatientGuids = new ArrayList<>(); Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient); File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = 
new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() || patientParser.getDeleted()) { interestingPatientGuids.add(patientParser.getPatientGuid()); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { interestingPatientGuids.add(consultationParser.getPatientGuid()); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { interestingPatientGuids.add(observationParser.getPatientGuid()); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { interestingPatientGuids.add(diaryParser.getPatientGuid()); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { interestingPatientGuids.add(drugRecordParser.getPatientGuid()); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); 
while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { interestingPatientGuids.add(issueRecordParser.getPatientGuid()); } } issueRecordParser.close(); } Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); for (String interestingPatientGuid: interestingPatientGuids) { if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid); } for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) { Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId); List<UUID> batches = batchesPerPatient.get(edsPatientId); if (batches != null) { Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId); if (batchesForExchange == null) { batchesForExchange = new HashSet<>(); exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange); } batchesForExchange.addAll(batches); } } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Running Protocols for Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixOrgs() { LOG.info("Posting orgs to protocol queue"); String[] orgIds = new String[]{ "332f31a2-7b28-47cb-af6f-18f65440d43d", "c893d66b-eb89-4657-9f53-94c5867e7ed9"}; ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>(); for (String orgId: orgIds) { LOG.info("Doing org ID " + orgId); UUID orgUuid = UUID.fromString(orgId); try { //select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING; ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), 
orgUuid); UUID batchId = resourceByExchangeBatch.getBatchId(); //select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING; ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId); UUID exchangeId = exchangeBatch.getExchangeId(); Set<UUID> list = exchangeBatches.get(exchangeId); if (list == null) { list = new HashSet<>(); exchangeBatches.put(exchangeId, list); } list.add(batchId); } catch (Exception ex) { LOG.error("", ex); break; } } try { //find the config for our protocol queue (which is in the inbound config) String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatches.keySet()) { Set<UUID> batchIds = exchangeBatches.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } catch (Exception ex) { LOG.error("", ex); return; } LOG.info("Finished posting orgs to protocol queue"); }*/ /*private static void findCodes() { LOG.info("Finding missing codes"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID serviceId = row.get(0, UUID.class); UUID systemId = row.get(1, UUID.class); UUID exchangeId = row.get(2, UUID.class); UUID version = row.get(3, UUID.class); ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version); String xml = audit.getErrorXml(); if (xml == null) { continue; } String codePrefix = "Failed to find clinical code CodeableConcept for codeId "; int codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " clinical code " + code + " from " + audit.getStarted()); continue; } codePrefix = "Failed to find medication CodeableConcept for codeId "; codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); 
String name = service.getName(); LOG.info(name + " drug code " + code + " from " + audit.getStarted()); continue; } } LOG.info("Finished finding missing codes"); }*/ private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating TPP Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createTppSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating TPP Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } //LOG.info("Doing dir " + sourceFile); createTppSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("IDPatient")) { filterColumn = "IDPatient"; } else if (name.equalsIgnoreCase("SRPatient.csv")) { filterColumn = "RowIdentifier"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); /*} else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); copyFile(sourceFile, destFile); }*/ } } } private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Vision Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); 
//ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createVisionSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating Vision Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createVisionSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; if (name.contains("encounter_data") || name.contains("journal_data") || name.contains("patient_data") || name.contains("referral_data")) { filterColumn = 0; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Homerton Subset"); try { Set<String> PersonIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } PersonIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createHomertonSubsetForFile(sourceDir, destDir, PersonIds); LOG.info("Finished Creating Homerton Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createHomertonSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new 
BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withHeader(); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //PersonId column at 1 if (name.contains("ENCOUNTER") || name.contains("PATIENT")) { filterColumn = 1; } else if (name.contains("DIAGNOSIS")) { //PersonId column at 13 filterColumn = 13; } else if (name.contains("ALLERGY")) { //PersonId column at 2 filterColumn = 2; } else if (name.contains("PROBLEM")) { //PersonId column at 4 filterColumn = 4; } else { //if no patient column, just copy the file (i.e. PROCEDURE) parser.close(); LOG.info("Copying file without PatientId " + sourceFile); copyFile(sourceFile, destFile); continue; } Map<String, Integer> headerMap = parser.getHeaderMap(); String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Adastra Subset"); try { Set<String> caseIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } //adastra extract files are all keyed on caseId caseIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createAdastraSubsetForFile(sourceDir, destDir, caseIds); LOG.info("Finished Creating Adastra Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createAdastraSubsetForFile(sourceFile, destFile, caseIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|'); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //CaseRef column at 0 if (name.contains("NOTES") || name.contains("CASEQUESTIONS") || name.contains("OUTCOMES") || name.contains("CONSULTATION") || name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") || name.contains("PATIENT")) { filterColumn = 0; } else if (name.contains("CASE")) { //CaseRef 
column at 2 filterColumn = 2; } else if (name.contains("PROVIDER")) { //CaseRef column at 7 filterColumn = 7; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String caseId = csvRecord.get(filterColumn); if (caseIds.contains(caseId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void exportFhirToCsv(UUID serviceId, String destinationPath) { try { File dir = new File(destinationPath); if (dir.exists()) { dir.mkdirs(); } Map<String, CSVPrinter> hmPrinters = new HashMap<>(); EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current"); LOG.debug("Running query"); ResultSet rs = ps.executeQuery(); LOG.debug("Got result set"); while (rs.next()) { String id = rs.getString(1); String type = rs.getString(2); String json = rs.getString(3); CSVPrinter printer = hmPrinters.get(type); if (printer == null) { String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv"); FileWriter fileWriter = new FileWriter(new File(path)); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader("resource_id", "resource_json") .withDelimiter('\t') .withEscape((Character) null) .withQuote((Character) null) .withQuoteMode(QuoteMode.MINIMAL); printer = new CSVPrinter(bufferedWriter, format); hmPrinters.put(type, printer); } printer.printRecord(id, json); } for (String type : hmPrinters.keySet()) { CSVPrinter printer = hmPrinters.get(type); printer.flush(); printer.close(); } ps.close(); entityManager.close(); } catch (Throwable t) { LOG.error("", t); } } } /*class ResourceFiler extends FhirResourceFiler { public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError, List<UUID> batchIdsCreated, int maxFilingThreads) { super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads); } private List<Resource> newResources = new ArrayList<>(); public List<Resource> getNewResources() { return newResources; } @Override public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling saveAdminResource"); } @Override public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deleteAdminResource"); } @Override public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { for (Resource resource: resources) { if (mapIds) { IdHelper.mapIds(getServiceId(), getSystemId(), resource); } newResources.add(resource); } } @Override public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deletePatientResource"); } }*/
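/* Editor's note: a minimal, self-contained sketch of the "filter a CSV extract down to a set of IDs"
   pattern that createTppSubsetForFile, createVisionSubsetForFile, createHomertonSubsetForFile and
   createAdastraSubsetForFile above all repeat. It assumes only commons-csv on the classpath, as the
   methods above already use; the class name, method name and parameters here are illustrative and
   are not part of the original Main class. */
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.csv.QuoteMode;

import java.io.*;
import java.nio.charset.Charset;
import java.util.Map;
import java.util.Set;

public class CsvSubsetSketch {

    /**
     * Copies from sourceFile to destFile only those records whose value in filterColumn is
     * contained in wantedIds, preserving the header row and column order of the source file.
     */
    public static void filterCsvById(File sourceFile, File destFile, String filterColumn,
                                     Set<String> wantedIds, Charset encoding) throws IOException {

        CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();

        try (Reader reader = new InputStreamReader(new FileInputStream(sourceFile), encoding);
             CSVParser parser = new CSVParser(reader, format)) {

            //rebuild the header array in column order, as the subset methods above do
            Map<String, Integer> headerMap = parser.getHeaderMap();
            String[] headers = new String[headerMap.size()];
            for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                headers[entry.getValue()] = entry.getKey();
            }

            try (Writer writer = new OutputStreamWriter(new FileOutputStream(destFile), encoding);
                 CSVPrinter printer = new CSVPrinter(writer,
                         CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(headers))) {

                //copy only the records that belong to one of the wanted IDs
                for (CSVRecord record : parser) {
                    if (wantedIds.contains(record.get(filterColumn))) {
                        printer.printRecord(record);
                    }
                }
            }
        }
    }
}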
src/eds-queuereader/src/main/java/org/endeavourhealth/queuereader/Main.java
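/* Editor's note: the older version of Main.java that follows dispatches on args[0] through a long
   chain of if-blocks. The sketch below shows a table-driven alternative for illustration only; the
   two command names mirror real arguments handled by main(), but the class name and the handler
   bodies are placeholders and are not part of the original code. */
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;

public class CommandDispatchSketch {

    //maps a lower-cased command name to a handler that receives the remaining arguments
    private static final Map<String, Consumer<String[]>> COMMANDS = new HashMap<>();

    static {
        COMMANDS.put("createtppsubset",
                a -> System.out.println("would call createTppSubset(" + String.join(", ", a) + ")"));
        COMMANDS.put("exportfhirtocsv",
                a -> System.out.println("would call exportFhirToCsv(" + String.join(", ", a) + ")"));
    }

    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("No command given");
            System.exit(1);
        }

        Consumer<String[]> handler = COMMANDS.get(args[0].toLowerCase(Locale.ROOT));
        if (handler == null) {
            System.err.println("Unknown command " + args[0]);
            System.exit(1);
        }

        //pass everything after the command name to the handler
        handler.accept(Arrays.copyOfRange(args, 1, args.length));
    }
}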
package org.endeavourhealth.queuereader; import OpenPseudonymiser.Crypto; import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.ObjectMetadata; import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.apache.commons.csv.*; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.endeavourhealth.common.cache.ObjectMapperPool; import org.endeavourhealth.common.config.ConfigManager; import org.endeavourhealth.common.fhir.*; import org.endeavourhealth.common.utility.FileHelper; import org.endeavourhealth.common.utility.SlackHelper; import org.endeavourhealth.core.configuration.ConfigDeserialiser; import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig; import org.endeavourhealth.core.configuration.QueueReaderConfiguration; import org.endeavourhealth.core.csv.CsvHelper; import org.endeavourhealth.core.database.dal.DalProvider; import org.endeavourhealth.core.database.dal.admin.ServiceDalI; import org.endeavourhealth.core.database.dal.admin.models.Service; import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI; import org.endeavourhealth.core.database.dal.audit.ExchangeDalI; import org.endeavourhealth.core.database.dal.audit.models.*; import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI; import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI; import org.endeavourhealth.core.database.dal.ehr.ResourceDalI; import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper; import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMapping; import org.endeavourhealth.core.database.dal.reference.PostcodeDalI; import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup; import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseAgeUpdaterlDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseIdDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.models.EnterpriseAge; import org.endeavourhealth.core.database.rdbms.ConnectionManager; import org.endeavourhealth.core.exceptions.TransformException; import org.endeavourhealth.core.fhirStorage.FhirSerializationHelper; import org.endeavourhealth.core.fhirStorage.FhirStorageService; import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint; import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange; import org.endeavourhealth.core.queueing.QueueHelper; import org.endeavourhealth.core.xml.TransformErrorSerializer; import org.endeavourhealth.core.xml.transformError.TransformError; import org.endeavourhealth.subscriber.filer.EnterpriseFiler; import org.endeavourhealth.transform.barts.transforms.PPADDTransformer; import org.endeavourhealth.transform.barts.transforms.PPNAMTransformer; import org.endeavourhealth.transform.barts.transforms.PPPHOTransformer; import org.endeavourhealth.transform.common.*; import org.endeavourhealth.transform.common.resourceBuilders.PatientBuilder; import 
org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer; import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper; import org.endeavourhealth.transform.enterprise.json.LinkDistributorConfig; import org.endeavourhealth.transform.enterprise.transforms.PatientTransformer; import org.hibernate.internal.SessionImpl; import org.hl7.fhir.instance.model.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import java.io.*; import java.lang.reflect.Constructor; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.sql.*; import java.text.SimpleDateFormat; import java.util.*; import java.util.Date; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; public class Main { private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static void main(String[] args) throws Exception { String configId = args[0]; LOG.info("Initialising config manager"); ConfigManager.initialize("queuereader", configId); /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEncounters")) { String table = args[1]; fixEncounters(table); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateHomertonSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateAdastraSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateVisionSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTppSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createTppSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsSubset")) { String sourceDirPath = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String samplePatientsFile = args[4]; createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsOrgs")) { String serviceId = args[1]; fixBartsOrgs(serviceId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestPreparedStatements")) { String url = args[1]; String user = args[2]; String pass = args[3]; String serviceId = args[4]; testPreparedStatements(url, user, pass, serviceId); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTransformMap")) { UUID serviceId = UUID.fromString(args[1]); String table = args[2]; String dstFile = args[3]; createTransforMap(serviceId, table, dstFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ExportFhirToCsv")) { UUID serviceId = UUID.fromString(args[1]); String path = args[2]; exportFhirToCsv(serviceId, path); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("TestBatchInserts")) { String url = args[1]; String user = 
args[2]; String pass = args[3]; String num = args[4]; String batchSize = args[5]; testBatchInserts(url, user, pass, num, batchSize); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) { applyEmisAdminCaches(); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSubscribers")) { fixSubscriberDbs(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Read")) { String s3Bucket = args[1]; String s3Key = args[2]; String start = args[3]; String len = args[4]; testS3Read(s3Bucket, s3Key, start, len); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) { String publisherId = args[1]; String systemId = args[2]; fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CheckDeletedObs")) { String serviceId = args[1]; String systemId = args[2]; checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) { fixPersonsNoNhsNumber(); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) { String subscriberConfigName = args[1]; populateSubscriberUprnTable(subscriberConfigName); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PostToRabbit")) { String exchangeName = args[1]; String srcFile = args[2]; postToRabbit(exchangeName, srcFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToProtocol")) { String srcFile = args[1]; postToProtocol(srcFile); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsPatients")) { UUID serviceId = UUID.fromString(args[1]); fixBartsPatients(serviceId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixDeceasedPatients")) { String subscriberConfig = args[1]; fixDeceasedPatients(subscriberConfig); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPseudoIds")) { String subscriberConfig = args[1]; int threads = Integer.parseInt(args[2]); fixPseudoIds(subscriberConfig, threads); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertExchangeBody")) { String systemId = args[1]; convertExchangeBody(UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixReferrals")) { fixReferralRequests(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateNewSearchTable")) { String table = args[1]; populateNewSearchTable(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsEscapes")) { String filePath = args[1]; fixBartsEscapedFiles(filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; String systemId = args[2]; String filePath = args[3]; postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath); System.exit(0); }*/ if (args.length >= 1 && 
args[0].equalsIgnoreCase("FixDisabledExtract")) { String sharedStoragePath = args[1]; String tempDir = args[2]; String systemId = args[3]; String serviceOdsCode = args[4]; fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestSlack")) { testSlack(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; boolean all = Boolean.parseBoolean(args[2]); postToInbound(UUID.fromString(serviceId), all); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixPatientSearch")) { String serviceId = args[1]; String systemId = null; if (args.length > 2) { systemId = args[2]; } if (serviceId.equalsIgnoreCase("All")) { fixPatientSearchAllServices(systemId); } else { fixPatientSearch(serviceId, systemId); } System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixSlotReferences")) { String serviceId = args[1]; try { UUID serviceUuid = UUID.fromString(serviceId); fixSlotReferences(serviceUuid); } catch (Exception ex) { fixSlotReferencesForPublisher(serviceId); } System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3VsMySQL")) { UUID serviceUuid = UUID.fromString(args[1]); int count = Integer.parseInt(args[2]); int sqlBatchSize = Integer.parseInt(args[3]); String bucketName = args[4]; testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("Exit")) { String exitCode = args[1]; LOG.info("Exiting with error code " + exitCode); int exitCodeInt = Integer.parseInt(exitCode); System.exit(exitCodeInt); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunSql")) { String host = args[1]; String username = args[2]; String password = args[3]; String sqlFile = args[4]; runSql(host, username, password, sqlFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateProtocolQueue")) { String serviceId = null; if (args.length > 1) { serviceId = args[1]; } String startingExchangeId = null; if (args.length > 2) { startingExchangeId = args[2]; } populateProtocolQueue(serviceId, startingExchangeId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEncounterTerms")) { String path = args[1]; String outputPath = args[2]; findEncounterTerms(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEmisStartDates")) { String path = args[1]; String outputPath = args[2]; findEmisStartDates(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportHl7Encounters")) { String sourceCsvPpath = args[1]; String outputPath = args[2]; exportHl7Encounters(sourceCsvPpath, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixExchangeBatches")) { fixExchangeBatches(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindCodes")) { findCodes(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindDeletedOrgs")) { findDeletedOrgs(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("LoadBartsData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String onlyThisFileType = null; if (args.length > 6) { onlyThisFileType = args[6]; } loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType); System.exit(0); } if (args.length 
>= 1 && args[0].equalsIgnoreCase("CreateBartsDataTables")) {
            createBartsDataTables();
            System.exit(0);
        }

        if (args.length != 1) {
            LOG.error("Usage: queuereader config_id");
            return;
        }

        LOG.info("--------------------------------------------------");
        LOG.info("EDS Queue Reader " + configId);
        LOG.info("--------------------------------------------------");

        LOG.info("Fetching queuereader configuration");
        String configXml = ConfigManager.getConfiguration(configId);
        QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);

        /*LOG.info("Registering shutdown hook");
        registerShutdownHook();*/

        // Instantiate rabbit handler
        LOG.info("Creating EDS queue reader");
        RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);

        // Begin consume
        rabbitHandler.start();
        LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
    }

    private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) {
        LOG.debug("Testing S3 vs MySQL for service " + serviceUuid);
        try {
            //retrieve some audit JSON from the DB
            EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
            SessionImpl session = (SessionImpl) entityManager.getDelegate();
            Connection connection = session.connection();

            String sql = "select resource_id, resource_type, version, mappings_json"
                    + " from resource_field_mappings"
                    + " where mappings_json != '[]'";
            if (count > -1) {
                //note the leading space, so the limit clause isn't concatenated directly onto the where clause
                sql += " limit " + count + ";";
            }

            Statement statement = connection.createStatement();
            statement.setFetchSize(1000);
            ResultSet rs = statement.executeQuery(sql);

            List<ResourceFieldMapping> list = new ArrayList<>();

            while (rs.next()) {
                int col = 1;
                String resourceId = rs.getString(col++);
                String resourceType = rs.getString(col++);
                String version = rs.getString(col++);
                String json = rs.getString(col++);

                ResourceFieldMapping obj = new ResourceFieldMapping();
                obj.setResourceId(UUID.fromString(resourceId));
                obj.setResourceType(resourceType);
                obj.setVersion(UUID.fromString(version));
                obj.setResourceField(json);
                list.add(obj);
            }

            rs.close();
            statement.close();
            entityManager.close();

            int done = 0;

            //test writing to S3
            long s3Start = System.currentTimeMillis();
            LOG.debug("Doing S3 test");

            for (int i=0; i<list.size(); i++) {
                ResourceFieldMapping mapping = list.get(i);

                String entryName = mapping.getVersion().toString() + ".json";
                String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip";
                String jsonStr = mapping.getResourceField();

                //may as well zip the data, since it will compress well
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                ZipOutputStream zos = new ZipOutputStream(baos);
                zos.putNextEntry(new ZipEntry(entryName));
                zos.write(jsonStr.getBytes());
                zos.flush();
                zos.close();

                byte[] bytes = baos.toByteArray();
                ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);

                //ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
                DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();

                AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
                        .standard()
                        .withCredentials(credentialsProvider)
                        .withRegion(Regions.EU_WEST_2);

                AmazonS3 s3Client = clientBuilder.build();

                ObjectMetadata objectMetadata = new ObjectMetadata();
                objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
                objectMetadata.setContentLength(bytes.length);
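                //upload the zipped JSON using the metadata above (AES-256 server-side encryption);
                //each put happens inside the timed loop, so it counts towards the S3 total logged below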
PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata); s3Client.putObject(putRequest); done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long s3End = System.currentTimeMillis(); LOG.debug("S3 took " + (s3End - s3Start) + " ms"); //test inserting into a DB long sqlStart = System.currentTimeMillis(); LOG.debug("Doing SQL test"); sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)"; entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); PreparedStatement ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); done = 0; int currentBatchSize = 0; for (int i=0; i<list.size(); i++) { ResourceFieldMapping mapping = list.get(i); int col = 1; ps.setString(col++, mapping.getResourceId().toString()); ps.setString(col++, mapping.getResourceType()); ps.setDate(col++, new java.sql.Date(System.currentTimeMillis())); ps.setString(col++, mapping.getVersion().toString()); ps.setString(col++, mapping.getResourceField()); ps.addBatch(); currentBatchSize ++; if (currentBatchSize >= sqlBatchSize || i+1 == list.size()) { ps.executeBatch(); entityManager.getTransaction().commit(); //mirror what would happen normally ps.close(); entityManager.close(); if (i+1 < list.size()) { entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long sqlEnd = System.currentTimeMillis(); LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms"); LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid); } catch (Throwable t) { LOG.error("", t); } } private static void createBartsDataTables() { LOG.debug("Creating Barts data tables"); try { List<String> fileTypes = new ArrayList<>(); fileTypes.add("AEATT"); fileTypes.add("Birth"); //fileTypes.add("BulkDiagnosis"); //fileTypes.add("BulkProblem"); //fileTypes.add("BulkProcedure"); fileTypes.add("CLEVE"); fileTypes.add("CVREF"); fileTypes.add("Diagnosis"); fileTypes.add("ENCINF"); fileTypes.add("ENCNT"); fileTypes.add("FamilyHistory"); fileTypes.add("IPEPI"); fileTypes.add("IPWDS"); fileTypes.add("LOREF"); fileTypes.add("NOMREF"); fileTypes.add("OPATT"); fileTypes.add("ORGREF"); fileTypes.add("PPADD"); fileTypes.add("PPAGP"); fileTypes.add("PPALI"); fileTypes.add("PPINF"); fileTypes.add("PPNAM"); fileTypes.add("PPPHO"); fileTypes.add("PPREL"); fileTypes.add("Pregnancy"); fileTypes.add("Problem"); fileTypes.add("PROCE"); fileTypes.add("Procedure"); fileTypes.add("PRSNLREF"); fileTypes.add("SusEmergency"); fileTypes.add("SusInpatient"); fileTypes.add("SusOutpatient"); //fileTypes.add("Tails"); TODO - have three separate tails files fileTypes.add("EventCode"); fileTypes.add("EventSetCanon"); fileTypes.add("EventSet"); fileTypes.add("EventSetExplode"); fileTypes.add("BlobContent"); fileTypes.add("SusInpatientTail"); fileTypes.add("SusOutpatientTail"); fileTypes.add("SusEmergencyTail"); fileTypes.add("AEINV"); fileTypes.add("AETRE"); fileTypes.add("OPREF"); fileTypes.add("STATREF"); fileTypes.add("RTTPE"); fileTypes.add("PPATH"); fileTypes.add("DOCRP"); fileTypes.add("SCHAC"); fileTypes.add("EALEN"); 
fileTypes.add("DELIV"); fileTypes.add("EALOF"); fileTypes.add("SusEmergencyCareDataSet"); fileTypes.add("SusEmergencyCareDataSetTail"); for (String fileType: fileTypes) { createBartsDataTable(fileType); } LOG.debug("Finished Creating Barts data tables"); } catch (Throwable t) { LOG.error("", t); } } private static void createBartsDataTable(String fileType) throws Exception { ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, null); } catch (ClassNotFoundException cnfe) { System.out.println("-- No parser for file type [" + fileType + "]"); return; } System.out.println("-- " + fileType); String table = fileType.replace(" ", "_"); String dropSql = "DROP TABLE IF EXISTS `" + table + "`;"; System.out.println(dropSql); String sql = "CREATE TABLE `" + table + "` ("; sql += "file_name varchar(100)"; if (parser instanceof AbstractFixedParser) { AbstractFixedParser fixedParser = (AbstractFixedParser)parser; List<FixedParserField> fields = fixedParser.getFieldList(); for (FixedParserField field: fields) { String col = field.getName(); int len = field.getFieldlength(); sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); sql += " varchar("; sql += len; sql += ")"; } } else { List<String> cols = parser.getColumnHeaders(); for (String col: cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); if (col.equals("BLOB_CONTENTS") || col.equals("VALUE_LONG_TXT") || col.equals("COMMENT_TXT") || col.equals("NONPREG_REL_PROBLM_SCT_CD")) { sql += " mediumtext"; } else if (col.indexOf("Date") > -1 || col.indexOf("Time") > -1) { sql += " varchar(10)"; } else { sql += " varchar(255)"; } } } sql += ");"; /*LOG.debug("-- fileType"); LOG.debug(sql);*/ System.out.println(sql); } private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) { LOG.debug("Loading Barts data from into " + dbUrl); try { //hash file type of every file ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); Date startDate = sdf.parse("2018-11-01"); //Date startDate = sdf.parse("2018-09-17"); //Date endDate = sdf.parse("2018-09-30"); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (files.isEmpty()) { continue; } for (ExchangePayloadFile file: files) { String type = file.getType(); String path = file.getPath(); //if only doing a specific file type, skip all others if (onlyThisFileType != null && !type.equals(onlyThisFileType)) { continue; } boolean processFile = false; if (type.equalsIgnoreCase("CVREF") || type.equalsIgnoreCase("LOREF") || type.equalsIgnoreCase("ORGREF") || type.equalsIgnoreCase("PRSNLREF") || type.equalsIgnoreCase("NOMREF")) { processFile = true; } else { File f = 
new File(path); File parentFile = f.getParentFile(); String parentDir = parentFile.getName(); Date extractDate = sdf.parse(parentDir); if (!extractDate.before(startDate)) { processFile = true; } /*if (!extractDate.before(startDate) && !extractDate.after(endDate)) { processFile = true; }*/ } if (processFile) { loadBartsDataFromFile(conn, path, type); } } } conn.close(); LOG.debug("Finished Loading Barts data from into " + dbUrl); } catch (Throwable t) { LOG.error("", t); } } private static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception { LOG.debug("Loading " + fileType + ": " + filePath); String fileName = FilenameUtils.getName(filePath); ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, filePath); } catch (ClassNotFoundException cnfe) { LOG.error("No parser for file type [" + fileType + "]"); return; } String table = fileType.replace(" ", "_"); //check table is there String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1"; Statement statement = conn.createStatement(); ResultSet rs = statement.executeQuery(sql); boolean tableExists = rs.next(); rs.close(); statement.close(); if (!tableExists) { LOG.error("No table exists for " + table); return; } //create insert statement sql = "INSERT INTO `" + table + "` ("; sql += "file_name"; List<String> cols = parser.getColumnHeaders(); for (String col: cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); } sql += ") VALUES ("; sql += "?"; for (String col: cols) { sql += ", "; sql += "?"; } sql += ")"; PreparedStatement ps = conn.prepareStatement(sql); List<String> currentBatchStrs = new ArrayList<>(); //load table try { int done = 0; int currentBatchSize = 0; while (parser.nextRecord()) { int col = 1; //file name is always first ps.setString(col++, fileName); for (String colName : cols) { CsvCell cell = parser.getCell(colName); if (cell == null) { ps.setNull(col++, Types.VARCHAR); } else { ps.setString(col++, cell.getString()); } } ps.addBatch(); currentBatchSize++; currentBatchStrs.add((ps.toString())); //for error handling if (currentBatchSize >= 5) { ps.executeBatch(); currentBatchSize = 0; currentBatchStrs.clear(); } done++; if (done % 5000 == 0) { LOG.debug("Done " + done); } } if (currentBatchSize >= 0) { ps.executeBatch(); } ps.close(); } catch (Throwable t) { LOG.error("Failed on batch with statements:"); for (String currentBatchStr: currentBatchStrs) { LOG.error(currentBatchStr); } throw t; } LOG.debug("Finished " + fileType + ": " + filePath); } private static void fixPseudoIds(String subscriberConfig, int threads) { LOG.debug("Fixing Pseudo IDs for " + subscriberConfig); try { //update psuedo ID on patient table //update psuedo ID on person table //update pseudo ID on subscriber_transform mapping table JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = 
ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } Connection subscriberConnection = EnterpriseFiler.openConnection(config); List<Long> patientIds = new ArrayList<>(); Map<Long, Long> hmOrgIds = new HashMap<>(); Map<Long, Long> hmPersonIds = new HashMap<>(); String sql = "SELECT id, organization_id, person_id FROM patient"; Statement statement = subscriberConnection.createStatement(); statement.setFetchSize(10000); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); long personId = rs.getLong(3); patientIds.add(new Long(patientId)); hmOrgIds.put(new Long(patientId), new Long(orgId)); hmPersonIds.put(new Long(patientId), new Long(personId)); } rs.close(); subscriberConnection.close(); LOG.debug("Found " + patientIds.size() + " patients"); AtomicInteger done = new AtomicInteger(); int pos = 0; List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<Long> patientSubset = new ArrayList<>(); int count = patientIds.size() / threads; if (i+1 == threads) { count = patientIds.size() - pos; } for (int j=0; j<count; j++) { Long patientId = patientIds.get(pos); patientSubset.add(patientId); pos ++; } FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done); Thread t = new Thread(runnable); t.start(); threadList.add(t); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } static class FixPseudoIdRunnable implements Runnable { private String subscriberConfig = null; private List<Long> patientIds = null; private Map<Long, Long> hmOrgIds = null; private Map<Long, Long> hmPersonIds = null; private AtomicInteger done = null; public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) { this.subscriberConfig = subscriberConfig; this.patientIds = patientIds; this.hmOrgIds = hmOrgIds; this.hmPersonIds = hmPersonIds; this.done = done; } @Override public void run() { try { doRun(); } catch (Throwable t) { LOG.error("", t); } } private void doRun() throws Exception { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Statement statement = subscriberConnection.createStatement(); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), 
Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } //PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); Statement subscriberTransformStatement = subscriberTransformConnection.createStatement(); String sql = null; ResultSet rs = null; for (Long patientId: patientIds) { Long orgId = hmOrgIds.get(patientId); Long personId = hmPersonIds.get(patientId); //find service ID sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); //find patient ID sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); if (!resourceType.equals("Patient")) { throw new Exception("Not a patient resource type for enterprise ID " + patientId); } //get patient Resource resource = null; try { resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId); } catch (Exception ex) { throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex); } if (resource == null) { LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; //generate new pseudo ID String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt); //save to person if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE person" + " SET pseudo_id = null" + " WHERE id = " + personId; statement.executeUpdate(sql); } else { sql = "UPDATE person" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + personId; statement.executeUpdate(sql); } //save to patient if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE patient" + " SET pseudo_id = null" + " WHERE id = " + patientId; statement.executeUpdate(sql); } else { sql = "UPDATE patient" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + patientId; statement.executeUpdate(sql); } //linked distributers if (arr != null) { for (LinkDistributorConfig linked: arr) { String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked); sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')" + " ON DUPLICATE KEY UPDATE" + " target_salt_key_name = VALUES(target_salt_key_name)," + " target_skid = VALUES(target_skid)"; statement.executeUpdate(sql); } } //save to subscriber transform sql = "DELETE FROM pseudo_id_map WHERE 
patient_id = '" + resourceId + "'"; subscriberTransformStatement.executeUpdate(sql); if (!Strings.isNullOrEmpty(pseudoId)) { sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')"; subscriberTransformStatement.executeUpdate(sql); } subscriberConnection.commit(); subscriberTransformConnection.commit(); int doneLocal = done.incrementAndGet(); if (doneLocal % 1000 == 0) { LOG.debug("Done " + doneLocal); } } statement.close(); subscriberTransformStatement.close(); subscriberConnection.close(); subscriberTransformConnection.close(); } } private static void fixDeceasedPatients(String subscriberConfig) { LOG.debug("Fixing Deceased Patients for " + subscriberConfig); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Map<Long, Long> patientIds = new HashMap<>(); String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL"; Statement statement = subscriberConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); patientIds.put(new Long(patientId), new Long(orgId)); } rs.close(); statement.close(); EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); for (Long patientId: patientIds.keySet()) { Long orgId = patientIds.get(patientId); statement = subscriberTransformConnection.createStatement(); sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); statement.close(); Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId); if (resource == null) { LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; Date dob = patient.getBirthDate(); Date dod = patient.getDeceasedDateTimeType().getValue(); Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod); updateEnterprisePatient(patientId, ages, subscriberConnection); updateEnterprisePerson(patientId, ages, subscriberConnection); } subscriberConnection.close(); subscriberTransformConnection.close(); LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } private static void updateEnterprisePatient(long enterprisePatientId, 
Integer[] ages, Connection connection) throws Exception { //the enterprise patient database isn't managed using hibernate, so we need to simply write a simple update statement StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient SET "); sb.append("age_years = ?, "); sb.append("age_months = ?, "); sb.append("age_weeks = ? "); sb.append("WHERE id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks"); } private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //update the age fields on the person table where the person is for our patient and their pseudo IDs match StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient, person SET "); sb.append("person.age_years = ?, "); sb.append("person.age_months = ?, "); sb.append("person.age_weeks = ? "); sb.append("WHERE patient.id = ? "); sb.append("AND patient.person_id = person.id "); sb.append("AND patient.pseudo_id = person.pseudo_id"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); } private static void testS3Read(String s3BucketName, String keyName, String start, String len) { LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); try { AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(DefaultAWSCredentialsProviderChain.getInstance()) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName); long startInt = Long.parseLong(start); long lenInt = Long.parseLong(len); long endInt = startInt + lenInt; request.setRange(startInt, endInt); long startMs = System.currentTimeMillis(); S3Object object = s3Client.getObject(request); InputStream inputStream = object.getObjectContent(); InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset()); StringBuilder sb = new StringBuilder(); char[] buf = new char[100]; while (true) { int read = reader.read(buf); if (read == -1 || sb.length() >= lenInt) { break; } sb.append(buf, 0, read); } reader.close(); long endMs = System.currentTimeMillis(); LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms"); LOG.debug("Finished Testing S3 
Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); } catch (Throwable t) { LOG.error("", t); } } private static void createTransforMap(UUID serviceId, String table, String outputFile) { LOG.debug("Creating transform map for " + serviceId + " from " + table); try { //retrieve from table EntityManager transformEntityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session2 = (SessionImpl)transformEntityManager.getDelegate(); Connection mappingConnection = session2.connection(); EntityManager ehrEntityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session3 = (SessionImpl)ehrEntityManager.getDelegate(); Connection ehrConnection = session3.connection(); String sql = "SELECT resource_type, resource_id, version FROM " + table; Statement statement = mappingConnection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); LOG.debug("Got resource IDs from DB"); Map<String, Map<String, List<String>>> hm = new HashMap<>(); int count = 0; //build map up per resource while (rs.next()) { String resourceType = rs.getString("resource_type"); String resourceId = rs.getString("resource_id"); String resourceVersion = rs.getString("version"); /*sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*/ /*sql = "SELECT * FROM resource_field_mappings WHERE version = ?"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*/ sql = "SELECT * FROM resource_field_mappings WHERE resource_type = '" + resourceType + "' AND resource_id = '" + resourceId + "' AND version = '" + resourceVersion + "';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql); //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //statement1.setString(1, resourceVersion); /*statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion);*/ ResultSet rs1 = null; try { rs1 = statement1.executeQuery(sql); } catch (Exception ex) { LOG.error("" + statement1); throw ex; } rs1.next(); String jsonStr = rs1.getString("mappings_json"); rs1.close(); statement1.close(); sql = "SELECT * FROM resource_history WHERE resource_type = ? AND resource_id = ? 
AND version = ?"; statement1 = ehrConnection.prepareStatement(sql); statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion); rs1 = statement1.executeQuery(); if (!rs1.next()) { throw new Exception("Failed to find resource_history for " + statement1.toString()); } String s = rs1.getString("resource_data"); rs1.close(); statement1.close(); if (Strings.isNullOrEmpty(s)) { continue; } JsonNode resourceJson = ObjectMapperPool.getInstance().readTree(s); Map<String, List<String>> hmResourceType = hm.get(resourceType); if (hmResourceType == null) { hmResourceType = new HashMap<>(); hm.put(resourceType, hmResourceType); } JsonNode json = ObjectMapperPool.getInstance().readTree(jsonStr); for (int i=0; i<json.size(); i++) { JsonNode child = json.get(i); JsonNode idNode = child.get("auditId"); JsonNode colsNode = child.get("cols"); if (idNode == null) { throw new Exception("No ID node in " + jsonStr); } if (colsNode == null) { throw new Exception("No cols node in " + jsonStr); } long id = idNode.asLong(); //get source file ID sql = "SELECT * FROM source_file_record WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, id); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileId = rs1.getLong("source_file_id"); rs1.close(); statement1.close(); //get source file type sql = "SELECT * FROM source_file WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileId); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileType = rs1.getLong("source_file_type_id"); rs1.close(); statement1.close(); //get the type desc sql = "SELECT * FROM source_file_type WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); rs1.next(); String fileTypeDesc = rs1.getString("description"); rs1.close(); statement1.close(); //get the cols Map<Integer, String> hmCols = new HashMap<>(); sql = "SELECT * FROM source_file_type_column WHERE source_file_type_id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); while (rs1.next()) { int index = rs1.getInt("column_index"); String name = rs1.getString("column_name"); hmCols.put(new Integer(index), name); } rs1.close(); statement1.close(); for (int j=0; j<colsNode.size(); j++) { JsonNode colNode = colsNode.get(j); int col = colNode.get("col").asInt(); String jsonField = colNode.get("field").asText(); int index = jsonField.indexOf("["); while (index > -1) { int endIndex = jsonField.indexOf("]", index); String prefix = jsonField.substring(0, index + 1); String suffix = jsonField.substring(endIndex); if (prefix.equals("extension[")) { String val = jsonField.substring(index+1, endIndex); int extensionIndex = Integer.parseInt(val); JsonNode extensionArray = resourceJson.get("extension"); JsonNode extensionRoot = extensionArray.get(extensionIndex); String extensionUrl = extensionRoot.get("url").asText(); extensionUrl = extensionUrl.replace("http://endeavourhealth.org/fhir/StructureDefinition/", ""); extensionUrl = extensionUrl.replace("http://hl7.org/fhir/StructureDefinition/", ""); jsonField = prefix + extensionUrl + suffix; } else { jsonField = prefix + "n" + suffix; } index = jsonField.indexOf("[", endIndex); } String colName = hmCols.get(new Integer(col)); String fileTypeAndCol = fileTypeDesc + ":" + colName; List<String> fieldNameMappings = hmResourceType.get(jsonField); if 
(fieldNameMappings == null) { fieldNameMappings = new ArrayList<>(); hmResourceType.put(jsonField, fieldNameMappings); } if (!fieldNameMappings.contains(fileTypeAndCol)) { fieldNameMappings.add(fileTypeAndCol); } } } count ++; if (count % 500 == 0) { LOG.debug("Done " + count); } } LOG.debug("Done " + count); rs.close(); ehrEntityManager.close(); //create output file List<String> lines = new ArrayList<>(); List<String> resourceTypes = new ArrayList<>(hm.keySet()); Collections.sort(resourceTypes, String.CASE_INSENSITIVE_ORDER); for (String resourceType: resourceTypes) { lines.add("============================================================"); lines.add(resourceType); lines.add("============================================================"); Map<String, List<String>> hmResourceType = hm.get(resourceType); List<String> fields = new ArrayList<>(hmResourceType.keySet()); Collections.sort(fields, String.CASE_INSENSITIVE_ORDER); for (String field: fields) { String linePrefix = field + " = "; List<String> sourceRecords = hmResourceType.get(field); for (String sourceRecord: sourceRecords) { lines.add(linePrefix + sourceRecord); linePrefix = Strings.repeat(" ", linePrefix.length()); } lines.add(""); } lines.add(""); } File f = new File(outputFile); Path p = f.toPath(); Files.write(p, lines, Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); LOG.debug("Finished creating transform map from " + table); } catch (Throwable t) { LOG.error("", t); } } private static void fixBartsPatients(UUID serviceId) { LOG.debug("Fixing Barts patients at service " + serviceId); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); int checked = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { String patientId = rs.getString(1); ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId)); if (wrapper == null) { LOG.error("Failed to get recource current for ID " + patientId); continue; } String oldJson = wrapper.getResourceData(); Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson); PatientBuilder patientBuilder = new PatientBuilder(patient); List<String> numbersFromCsv = new ArrayList<>(); if (patient.hasTelecom()) { for (ContactPoint contactPoint: patient.getTelecom()) { if (contactPoint.hasId()) { numbersFromCsv.add(contactPoint.getValue()); } } for (String numberFromCsv: numbersFromCsv) { PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv); } } List<HumanName> namesFromCsv = new ArrayList<>(); if (patient.hasName()) { for (HumanName name: patient.getName()) { if (name.hasId()) { namesFromCsv.add(name); } } for (HumanName name: namesFromCsv) { PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name); } } List<Address> addressesFromCsv = new ArrayList<>(); if (patient.hasAddress()) { for (Address address: patient.getAddress()) { if (address.hasId()) { addressesFromCsv.add(address); } } for (Address address: addressesFromCsv) { 
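                    //remove any existing address that has no ID but matches this address's value,
                    //since it's superseded by the ID'd address taken from the CSV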
PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address); } } String newJson = FhirSerializationHelper.serializeResource(patient); if (!newJson.equals(oldJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } checked ++; if (checked % 1000 == 0) { LOG.debug("Checked " + checked + " fixed " + fixed); } } LOG.debug("Checked " + checked + " fixed " + fixed); rs.close(); s.close(); edsEntityManager.close(); LOG.debug("Finish Fixing Barts patients at service " + serviceId); } catch (Throwable t) { LOG.error("", t); } } private static void postToRabbit(String exchangeName, String srcFile) { LOG.info("Posting to " + exchangeName + " from " + srcFile); try { List<UUID> exchangeIds = new ArrayList<>(); List<String> lines = Files.readAllLines(new File(srcFile).toPath()); for (String line: lines) { if (!Strings.isNullOrEmpty(line)) { try { UUID uuid = UUID.fromString(line); exchangeIds.add(uuid); } catch (Exception ex) { LOG.error("Skipping line " + line); } } } LOG.info("Found " + exchangeIds.size() + " to post to " + exchangeName); continueOrQuit(); LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName); QueueHelper.postToExchange(exchangeIds, exchangeName, null, true); LOG.info("Finished Posting to " + exchangeName+ " from " + srcFile); } catch (Throwable t) { LOG.error("", t); } } /*private static void postToProtocol(String srcFile) { LOG.info("Posting to protocol from " + srcFile); try { List<UUID> exchangeIds = new ArrayList<>(); List<String> lines = Files.readAllLines(new File(srcFile).toPath()); for (String line: lines) { if (!Strings.isNullOrEmpty(line)) { UUID uuid = UUID.fromString(line); exchangeIds.add(uuid); } } LOG.info("Posting " + exchangeIds.size() + " to Protocol queue"); QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false); LOG.info("Finished Posting to protocol from " + srcFile); } catch (Throwable t) { LOG.error("", t); } }*/ private static void populateSubscriberUprnTable(String subscriberConfigName) throws Exception { LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); //changed the format of the JSON JsonNode pseudoNode = config.get("pseudonymisation"); boolean pseudonymised = pseudoNode != null; byte[] saltBytes = null; if (pseudonymised) { JsonNode saltNode = pseudoNode.get("salt"); String base64Salt = saltNode.asText(); saltBytes = Base64.getDecoder().decode(base64Salt); } /*boolean pseudonymised = config.get("pseudonymised").asBoolean(); byte[] saltBytes = null; if (pseudonymised) { JsonNode saltNode = config.get("salt"); String base64Salt = saltNode.asText(); saltBytes = Base64.getDecoder().decode(base64Salt); }*/ Connection subscriberConnection = EnterpriseFiler.openConnection(config); String upsertSql; if (pseudonymised) { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " pseudo_uprn = VALUES(pseudo_uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " 
missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)"; } else { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " uprn = VALUES(uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)"; } PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql); int inBatch = 0; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); EnterpriseIdDalI enterpriseIdDal = DalProvider.factoryEnterpriseIdDal(subscriberConfigName); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal(); int checked = 0; int saved = 0; String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { int col = 1; String serviceId = rs.getString(col++); String patientId = rs.getString(col++); Long uprn = rs.getLong(col++); if (rs.wasNull()) { uprn = null; } String qualifier = rs.getString(col++); String abpAddress = rs.getString(col++); String algorithm = rs.getString(col++); String match = rs.getString(col++); boolean noAddress = rs.getBoolean(col++); boolean invalidAddress = rs.getBoolean(col++); boolean missingPostcode = rs.getBoolean(col++); boolean invalidPostcode = rs.getBoolean(col++); //check if patient ID already exists in the subscriber DB Long subscriberPatientId = enterpriseIdDal.findEnterpriseId(ResourceType.Patient.toString(), patientId); //if the patient doesn't exist on this subscriber DB, then don't transform this record if (subscriberPatientId != null) { Long subscriberOrgId = enterpriseIdDal.findEnterpriseOrganisationId(serviceId); String discoveryPersonId = patientLinkDal.getPersonId(patientId); Long subscriberPersonId = enterpriseIdDal.findOrCreateEnterprisePersonId(discoveryPersonId); String lsoaCode = null; if (!Strings.isNullOrEmpty(abpAddress)) { String[] toks = abpAddress.split(" "); String postcode = toks[toks.length - 1]; PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode); if (postcodeReference != null) { lsoaCode = postcodeReference.getLsoaCode(); } } col = 1; psUpsert.setLong(col++, subscriberPatientId); psUpsert.setLong(col++, subscriberOrgId); psUpsert.setLong(col++, subscriberPersonId); psUpsert.setString(col++, lsoaCode); if (pseudonymised) { String pseuoUprn = null; if (uprn != null) { TreeMap<String, String> keys = new TreeMap<>(); keys.put("UPRN", "" + uprn); Crypto crypto = new Crypto(); crypto.SetEncryptedSalt(saltBytes); pseuoUprn = crypto.GetDigest(keys); } psUpsert.setString(col++, pseuoUprn); } else { 
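                        //not pseudonymised, so store the raw UPRN (or NULL where no UPRN is present)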
if (uprn != null) { psUpsert.setLong(col++, uprn.longValue()); } else { psUpsert.setNull(col++, Types.BIGINT); } } psUpsert.setString(col++, qualifier); psUpsert.setString(col++, algorithm); psUpsert.setString(col++, match); psUpsert.setBoolean(col++, noAddress); psUpsert.setBoolean(col++, invalidAddress); psUpsert.setBoolean(col++, missingPostcode); psUpsert.setBoolean(col++, invalidPostcode); //LOG.debug("" + psUpsert); psUpsert.addBatch(); inBatch++; saved++; if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) { psUpsert.executeBatch(); subscriberConnection.commit(); inBatch = 0; } } checked ++; if (checked % 1000 == 0) { LOG.info("Checked " + checked + " Saved " + saved); } } if (inBatch > 0) { psUpsert.executeBatch(); subscriberConnection.commit(); } LOG.info("Chcked " + checked + " Saved " + saved); psUpsert.close(); subscriberConnection.close(); edsEntityManager.close(); LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName); } catch (Throwable t) { LOG.error("", t); } } private static void fixPersonsNoNhsNumber() { LOG.info("Fixing persons with no NHS number"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection patientSearchConnection = session.connection(); Statement patientSearchStatement = patientSearchConnection.createStatement(); for (Service service: services) { LOG.info("Doing " + service.getName() + " " + service.getId()); int checked = 0; int fixedPersons = 0; int fixedSearches = 0; String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)"; ResultSet rs = patientSearchStatement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String nhsNumber = rs.getString(2); //find matched person ID String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'"; Statement s = patientSearchConnection.createStatement(); ResultSet rsPersonId = s.executeQuery(personIdSql); String personId = null; if (rsPersonId.next()) { personId = rsPersonId.getString(1); } rsPersonId.close(); s.close(); if (Strings.isNullOrEmpty(personId)) { LOG.error("Patient " + patientId + " has no person ID"); continue; } //see whether person ID used NHS number to match String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'"; s = patientSearchConnection.createStatement(); ResultSet rsPatientLink = s.executeQuery(patientLinkSql); String matchingNhsNumber = null; if (rsPatientLink.next()) { matchingNhsNumber = rsPatientLink.getString(1); } rsPatientLink.close(); s.close(); //if patient link person has a record for this nhs number, update the person link if (!Strings.isNullOrEmpty(matchingNhsNumber)) { String newPersonId = UUID.randomUUID().toString(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String createdAtStr = sdf.format(new Date()); s = patientSearchConnection.createStatement(); //new record in patient link history String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')"; //LOG.debug(patientHistorySql); s.execute(patientHistorySql); //update patient link String patientLinkUpdateSql = "UPDATE 
patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'"; s.execute(patientLinkUpdateSql); patientSearchConnection.commit(); s.close(); fixedPersons ++; } //if patient search has an invalid NHS number, update it if (!Strings.isNullOrEmpty(nhsNumber)) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); patientSearchDal.update(service.getId(), patient); fixedSearches ++; } checked ++; if (checked % 50 == 0) { LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); } } LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); rs.close(); } patientSearchStatement.close(); entityManager.close(); LOG.info("Finished fixing persons with no NHS number"); } catch (Throwable t) { LOG.error("", t); } } private static void checkDeletedObs(UUID serviceId, UUID systemId) { LOG.info("Checking Observations for " + serviceId); try { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); List<String> subscriberConfigs = new ArrayList<>(); subscriberConfigs.add("ceg_data_checking"); subscriberConfigs.add("ceg_enterprise"); subscriberConfigs.add("hurley_data_checking"); subscriberConfigs.add("hurley_deidentified"); Set<String> observationsNotDeleted = new HashSet<>(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile firstItem = payload.get(0); //String version = EmisCsvToFhirTransformer.determineVersion(payload); //if we've reached the point before we process data for this practice, break out try { if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) { break; } } catch (TransformException e) { LOG.info("Skipping exchange containing " + firstItem.getPath()); continue; } String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch : batches) { if (batch.getEdsPatientId() != null) { hmBatchesByPatient.put(batch.getEdsPatientId(), batch); } } for (ExchangePayloadFile item : payload) { String type = item.getType(); if 
(type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String deleted = record.get("Deleted"); String observationId = record.get("ObservationGuid"); if (deleted.equalsIgnoreCase("true")) { //if observation was reinstated at some point, skip it if (observationsNotDeleted.contains(observationId)) { continue; } String patientId = record.get("PatientGuid"); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId); Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell); for (ResourceType resourceType: resourceTypes) { //will already have been done OK if (resourceType == ResourceType.Observation) { continue; } String sourceId = patientId + ":" + observationId; UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId); } LOG.debug("Fixing " + resourceType + " " + uuid); //create file of IDs to delete for each subscriber DB for (String subscriberConfig : subscriberConfigs) { EnterpriseIdDalI subscriberDal = DalProvider.factoryEnterpriseIdDal(subscriberConfig); Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString()); if (enterpriseId == null) { continue; } String sql = null; if (resourceType == ResourceType.AllergyIntolerance) { sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId; } else if (resourceType == ResourceType.ReferralRequest) { sql = "DELETE FROM referral_request WHERE id = " + enterpriseId; } else { sql = "DELETE FROM observation WHERE id = " + enterpriseId; } sql += "\n"; File f = new File(subscriberConfig + ".sql"); Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } //delete resource if not already done ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (resourceWrapper != null && !resourceWrapper.isDeleted()) { ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId()); resourceWrapper.setDeleted(true); resourceWrapper.setResourceData(null); resourceWrapper.setResourceMetadata(""); resourceWrapper.setExchangeBatchId(batch.getBatchId()); resourceWrapper.setVersion(UUID.randomUUID()); resourceWrapper.setCreatedAt(new Date()); resourceWrapper.setExchangeId(exchange.getId()); resourceDal.delete(resourceWrapper); } } } else { observationsNotDeleted.add(observationId); } } parser.close(); } } } LOG.info("Finished Checking Observations for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) { LOG.info("Testing Batch Inserts"); try { int inserts = Integer.parseInt(num); int batchSize = Integer.parseInt(batchSizeStr); LOG.info("Openning Connection"); Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); //String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);"; String sql = "INSERT 
INTO drewtest.insert_test VALUES (?, ?, ?)"; PreparedStatement ps = conn.prepareStatement(sql); if (batchSize == 1) { LOG.info("Testing non-batched inserts"); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.execute(); } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } else { LOG.info("Testing batched inserts with batch size " + batchSize); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.addBatch(); if ((i + 1) % batchSize == 0 || i + 1 >= inserts) { ps.executeBatch(); } } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } ps.close(); conn.close(); LOG.info("Finished Testing Batch Inserts"); } catch (Exception ex) { LOG.error("", ex); } } private static String randomStr() { StringBuffer sb = new StringBuffer(); Random r = new Random(System.currentTimeMillis()); while (sb.length() < 1100) { sb.append(r.nextLong()); } return sb.toString(); } /*private static void fixEmisProblems(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems for " + serviceId); try { Map<String, List<String>> hmReferences = new HashMap<>(); Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Caching problem links"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); //String version = EmisCsvToFhirTransformer.determineVersion(payload); ExchangePayloadFile firstItem = payload.get(0); String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("ObservationGuid"); String localId = patientId + ":" + observationId; ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId)); Reference localReference = ReferenceHelper.createReference(resourceType, localId); Reference globalReference = 
IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_DrugRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("DrugRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_IssueRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("IssueRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } 
problemChildren.add(globalReference.getReference()); } } parser.close(); } else { //no problem link } } } LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString()); for (ResourceWrapper wrapper: wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } //sort out the contained list of children ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder); //remove any existing children listBuilder.removeContainedList(); //add all the new ones we've found List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString()); if (localChildReferences != null) { for (String localChildReference: localChildReferences) { Reference reference = ReferenceHelper.createReference(localChildReference); listBuilder.addContainedListItem(reference); } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(condition); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) { try { LOG.info("Doing fix for " + publisher); String[] done = new String[]{ "01fcfe94-5dfd-4951-b74d-129f874209b0", "07a267d3-189b-4968-b9b0-547de28edef5", "0b9601d1-f7ab-4f5d-9f77-1841050f75ab", "0fd2ff5d-2c25-4707-afe8-707e81a250b8", "14276da8-c344-4841-a36d-aa38940e78e7", "158251ca-0e1d-4471-8fae-250b875911e1", "160131e2-a5ff-49c8-b62e-ae499a096193", "16490f2b-62ce-44c6-9816-528146272340", "18fa1bed-b9a0-4d55-a0cc-dfc31831259a", "19cba169-d41e-424a-812f-575625c72305", "19ff6a03-25df-4e61-9ab1-4573cfd24729", "1b3d1627-f49e-4103-92d6-af6016476da3", "1e198fbb-c9cd-429a-9b50-0f124d0d825c", "20444fbe-0802-46fc-8203-339a36f52215", "21e27bf3-8071-48dd-924f-1d8d21f9216f", "23203e72-a3b0-4577-9942-30f7cdff358e", "23be1f4a-68ec-4a49-b2ec-aa9109c99dcd", 
"2b56033f-a9b4-4bab-bb53-c619bdb38895", "2ba26f2d-8068-4b77-8e62-431edfc2c2e2", "2ed89931-0ce7-49ea-88ac-7266b6c03be0", "3abf8ded-f1b1-495b-9a2d-5d0223e33fa7", "3b0f6720-2ffd-4f8a-afcd-7e3bb311212d", "415b509a-cf39-45bc-9acf-7f982a00e159", "4221276f-a3b0-4992-b426-ec2d8c7347f2", "49868211-d868-4b55-a201-5acac0be0cc0", "55fdcbd0-9b2d-493a-b874-865ccc93a156", "56124545-d266-4da9-ba1f-b3a16edc7f31", "6c11453b-dbf8-4749-a0ec-ab705920e316" }; ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> all = dal.getAll(); for (Service service: all) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { boolean alreadyDone = false; String idStr = service.getId().toString(); for (String doneId: done) { if (idStr.equalsIgnoreCase(doneId)) { alreadyDone = true; break; } } if (alreadyDone) { continue; } fixEmisProblems3(service.getId(), systemId); } } LOG.info("Done fix for " + publisher); } catch (Throwable t) { LOG.error("", t); } } private static void fixEmisProblems3(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems 3 for " + serviceId); try { Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Finding patients"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("Admin_Patient")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientId = record.get("PatientGuid"); patientIds.add(patientId); } parser.close(); } } } LOG.info("Finished checking files, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); for (ResourceType resourceType: potentialResourceTypes) { List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString()); for (ResourceWrapper wrapper : wrappers) { if 
(wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson); //Also go through all observation records and any that have parent observations - these need fixing too??? Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE); if (extension != null) { Reference reference = (Reference)extension.getValue(); fixReference(serviceId, filer, reference, potentialResourceTypes); } if (resource instanceof Observation) { Observation obs = (Observation)resource; if (obs.hasRelated()) { for (Observation.ObservationRelatedComponent related: obs.getRelated()) { if (related.hasTarget()) { Reference reference = related.getTarget(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } if (resource instanceof DiagnosticReport) { DiagnosticReport diag = (DiagnosticReport)resource; if (diag.hasResult()) { for (Reference reference: diag.getResult()) { fixReference(serviceId, filer, reference, potentialResourceTypes); } } } //Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save if (resource instanceof Condition) { if (resource.hasContained()) { for (Resource contained: resource.getContained()) { if (contained.getId().equals("Items")) { List_ containedList = (List_)contained; if (containedList.hasEntry()) { for (List_.ListEntryComponent entry: containedList.getEntry()) { Reference reference = entry.getItem(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } } //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(resource); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed++; } } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems 3 for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception { //if it's already something other than observation, we're OK ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference); if (comps.getResourceType() != ResourceType.Observation) { return false; } Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference); String sourceId = ReferenceHelper.getReferenceId(sourceReference); String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId); if (newReferenceValue == null) { return false; } 
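        //a corrected reference (with the true resource type) was found, so update the Reference in place;
        //the calling code detects the change when it re-serialises the resource and saves the updated JSON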
reference.setReference(newReferenceValue); return true; } private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception { ResourceDalI dal = DalProvider.factoryResourceDal(); for (ResourceType resourceType: potentials) { UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { continue; } ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (wrapper != null) { return ReferenceHelper.createResourceReference(resourceType, uuid.toString()); } } return null; } /*private static void convertExchangeBody(UUID systemUuid) { try { LOG.info("Converting exchange bodies for system " + systemUuid); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE); if (exchanges.isEmpty()) { continue; } LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges"); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); try { //already done ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class); continue; } catch (JsonSyntaxException ex) { //if the JSON can't be parsed, then it'll be the old format of body that isn't JSON } List<ExchangePayloadFile> newFiles = new ArrayList<>(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (String file: files) { ExchangePayloadFile fileObj = new ExchangePayloadFile(); String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1); fileObj.setPath(fileWithoutSharedStorage); //size List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file); for (FileInfo info: fileInfos) { if (info.getFilePath().equals(file)) { long size = info.getSize(); fileObj.setSize(new Long(size)); } } //type if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live || systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev //emis String name = FilenameUtils.getName(file); String[] toks = name.split("_"); String first = toks[1]; String second = toks[2]; fileObj.setType(first + "_" + second); *//* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095") || systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev //cerner String name = FilenameUtils.getName(file); if (Strings.isNullOrEmpty(name)) { continue; } try { String type = BartsCsvToFhirTransformer.identifyFileType(name); fileObj.setType(type); } catch (Exception ex2) { throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId()); }*//* } else { throw new Exception("Unknown system ID " + systemUuid); } newFiles.add(fileObj); } String json = JsonSerializer.serialize(newFiles); exchange.setBody(json); exchangeDal.save(exchange); } } LOG.info("Finished Converting exchange bodies for system " + systemUuid); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixBartsOrgs(String serviceId) { try { LOG.info("Fixing Barts orgs"); ResourceDalI dal = DalProvider.factoryResourceDal(); List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString()); 
LOG.debug("Found " + wrappers.size() + " resources"); int done = 0; int fixed = 0; for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId()); ResourceWrapper mostRecent = history.get(0); String json = mostRecent.getResourceData(); Organization org = (Organization)FhirSerializationHelper.deserializeResource(json); String odsCode = IdentifierHelper.findOdsCode(org); if (Strings.isNullOrEmpty(odsCode) && org.hasIdentifier()) { boolean hasBeenFixed = false; for (Identifier identifier: org.getIdentifier()) { if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE) && identifier.hasId()) { odsCode = identifier.getId(); identifier.setValue(odsCode); identifier.setId(null); hasBeenFixed = true; } } if (hasBeenFixed) { String newJson = FhirSerializationHelper.serializeResource(org); mostRecent.setResourceData(newJson); LOG.debug("Fixed Organization " + org.getId()); *//*LOG.debug(json); LOG.debug(newJson);*//* saveResourceWrapper(UUID.fromString(serviceId), mostRecent); fixed ++; } } } done ++; if (done % 100 == 0) { LOG.debug("Done " + done + ", Fixed " + fixed); } } LOG.debug("Done " + done + ", Fixed " + fixed); LOG.info("Finished Barts orgs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testPreparedStatements(String url, String user, String pass, String serviceId) { try { LOG.info("Testing Prepared Statements"); LOG.info("Url: " + url); LOG.info("user: " + user); LOG.info("pass: " + pass); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? 
AND source_id = ?"; long start = System.currentTimeMillis(); for (int i=0; i<10000; i++) { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); ps.setString(1, serviceId); ps.setString(2, "MILLPERSIDtoMRN"); ps.setString(3, UUID.randomUUID().toString()); ResultSet rs = ps.executeQuery(); while (rs.next()) { //do nothing } } finally { if (ps != null) { ps.close(); } } } long end = System.currentTimeMillis(); LOG.info("Took " + (end-start) + " ms"); //close connection conn.close(); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEncounters(String table) { LOG.info("Fixing encounters from " + table); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); Date cutoff = sdf.parse("2018-03-14 11:42"); EntityManager entityManager = ConnectionManager.getAdminEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<UUID> serviceIds = new ArrayList<>(); Map<UUID, UUID> hmSystems = new HashMap<>(); String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { UUID serviceId = UUID.fromString(rs.getString(1)); UUID systemId = UUID.fromString(rs.getString(2)); serviceIds.add(serviceId); hmSystems.put(serviceId, systemId); } rs.close(); statement.close(); entityManager.close(); for (UUID serviceId: serviceIds) { UUID systemId = hmSystems.get(serviceId); LOG.info("Doing service " + serviceId + " and system " + systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId); List<UUID> exchangeIdsToProcess = new ArrayList<>(); for (UUID exchangeId: exchangeIds) { List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { Date d = audit.getStarted(); if (d.after(cutoff)) { exchangeIdsToProcess.add(exchangeId); break; } } } Map<String, ReferenceList> consultationNewChildMap = new HashMap<>(); Map<String, ReferenceList> observationChildMap = new HashMap<>(); Map<String, ReferenceList> newProblemChildren = new HashMap<>(); for (UUID exchangeId: exchangeIdsToProcess) { Exchange exchange = exchangeDal.getExchange(exchangeId); String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody()); String version = EmisCsvToFhirTransformer.determineVersion(files); List<String> interestingFiles = new ArrayList<>(); for (String file: files) { if (file.indexOf("CareRecord_Consultation") > -1 || file.indexOf("CareRecord_Observation") > -1 || file.indexOf("CareRecord_Diary") > -1 || file.indexOf("Prescribing_DrugRecord") > -1 || file.indexOf("Prescribing_IssueRecord") > -1 || file.indexOf("CareRecord_Problem") > -1) { interestingFiles.add(file); } } files = interestingFiles.toArray(new String[0]); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true); Consultation consultationParser = (Consultation)parsers.get(Consultation.class); while (consultationParser.nextRecord()) { CsvCell consultationGuid = consultationParser.getConsultationGuid(); CsvCell 
patientGuid = consultationParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); consultationNewChildMap.put(sourceId, new ReferenceList()); } Problem problemParser = (Problem)parsers.get(Problem.class); while (problemParser.nextRecord()) { CsvCell problemGuid = problemParser.getObservationGuid(); CsvCell patientGuid = problemParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); newProblemChildren.put(sourceId, new ReferenceList()); } //run this pre-transformer to pre-cache some stuff in the csv helper, which //is needed when working out the resource type that each observation would be saved as ObservationPreTransformer.transform(version, parsers, null, csvHelper); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { CsvCell observationGuid = observationParser.getObservationGuid(); CsvCell patientGuid = observationParser.getPatientGuid(); String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid); CsvCell codeId = observationParser.getCodeId(); if (codeId.isEmpty()) { continue; } ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId); if (obUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId); //resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); } Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString()); CsvCell consultationGuid = observationParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell problemGuid = observationParser.getProblemGuid(); if (!problemGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell parentObGuid = observationParser.getParentObservationGuid(); if (!parentObGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid); ReferenceList referenceList = observationChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); observationChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } } Diary diaryParser = (Diary)parsers.get(Diary.class); while (diaryParser.nextRecord()) { CsvCell consultationGuid = diaryParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { CsvCell diaryGuid = diaryParser.getDiaryGuid(); CsvCell patientGuid = diaryParser.getPatientGuid(); String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid); UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId); if (diaryUuid == null) { continue; //LOG.error("Null observation 
UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId); } Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(diaryReference); } } IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class); while (issueRecordParser.nextRecord()) { CsvCell problemGuid = issueRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid(); CsvCell patientGuid = issueRecordParser.getPatientGuid(); String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid); UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId); if (issueRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId); } Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(issueRecordReference); } } DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class); while (drugRecordParser.nextRecord()) { CsvCell problemGuid = drugRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid(); CsvCell patientGuid = drugRecordParser.getPatientGuid(); String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid); UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId); if (drugRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId); } Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(drugRecordReference); } } for (AbstractCsvParser parser : parsers.values()) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix"); for (String encounterSourceId: consultationNewChildMap.keySet()) { ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId); //map to UUID UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId); if (encounterId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId); if 
(history.isEmpty()) { continue; //throw new Exception("Empty history for Encounter " + encounterId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(encounter); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix"); for (String sourceId: observationChildMap.keySet()) { ReferenceList childReferences = observationChildMap.get(sourceId); //map to UUID ResourceType resourceType = null; UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId); if (resourceId != null) { resourceType = ResourceType.Observation; } else { resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId); if (resourceId != null) { resourceType = ResourceType.DiagnosticReport; } else { continue; } } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId); if (history.isEmpty()) { //throw new Exception("Empty history for " + resourceType + " " + resourceId); continue; } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (resourceType == ResourceType.Observation) { if (wrapper.getResourceData() != null) { Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (observation.hasRelated()) { for (Observation.ObservationRelatedComponent related : observation.getRelated()) { Reference reference = related.getTarget(); childReferences.add(reference); } } } } else { if (wrapper.getResourceData() != null) { DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (report.hasResult()) { for (Reference reference : 
report.getResult()) { childReferences.add(reference); } } } } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData()); boolean changed = false; if (resourceType == ResourceType.Observation) { ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addChildObservation(reference)) { changed = true; } } } else { DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addResult(reference)) { changed = true; } } } if (changed) { String newJson = FhirSerializationHelper.serializeResource(resource); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); }*//* } LOG.info("Found " + newProblemChildren.size() + " Problems to fix"); for (String sourceId: newProblemChildren.keySet()) { ReferenceList childReferences = newProblemChildren.get(sourceId); //map to UUID UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId); if (conditionId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Condition " + conditionId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); containedListBuilder.addReferences(childReferences); String newJson = 
FhirSerializationHelper.serializeResource(condition); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } //mark as done String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';"; entityManager = ConnectionManager.getAdminEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); } *//** * For each practice: Go through all files processed since 14 March Cache all links as above Cache all Encounters saved too For each Encounter referenced at all: Retrieve latest version from resource current Retrieve version prior to 14 March Update current version with old references plus new ones For each parent observation: Retrieve latest version (could be observation or diagnostic report) For each problem: Retrieve latest version from resource current Check if still a problem: Retrieve version prior to 14 March Update current version with old references plus new ones *//* LOG.info("Finished Fixing encounters from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception { if (wrapper.getVersion() == null) { throw new Exception("Can't update resource history without version UUID"); } if (wrapper.getResourceData() != null) { long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData()); wrapper.setResourceChecksum(new Long(checksum)); } EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); entityManager.getTransaction().begin(); String json = wrapper.getResourceData(); json = json.replace("'", "''"); json = json.replace("\\", "\\\\"); String patientId = ""; if (wrapper.getPatientId() != null) { patientId = wrapper.getPatientId().toString(); } String updateSql = "UPDATE resource_current" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE service_id = '" + wrapper.getServiceId() + "'" + " AND patient_id = '" + patientId + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" + " AND resource_id = '" + wrapper.getResourceId() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); //SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS"); //String createdAtStr = sdf.format(wrapper.getCreatedAt()); updateSql = "UPDATE resource_history" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE resource_id = '" + wrapper.getResourceId() + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" //+ " AND created_at = '" + createdAtStr + "'" + " AND version = '" + wrapper.getVersion() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); entityManager.getTransaction().commit(); } /*private static void populateNewSearchTable(String table) { LOG.info("Populating New Search Table"); try { EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = 
connection.createStatement(); List<String> patientIds = new ArrayList<>(); Map<String, String> serviceIds = new HashMap<>(); String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String serviceId = rs.getString(2); patientIds.add(patientId); serviceIds.put(patientId, serviceId); } rs.close(); statement.close(); entityManager.close(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearch2Dal(); LOG.info("Found " + patientIds.size() + " to do"); for (int i=0; i<patientIds.size(); i++) { String patientIdStr = patientIds.get(i); UUID patientId = UUID.fromString(patientIdStr); String serviceIdStr = serviceIds.get(patientIdStr); UUID serviceId = UUID.fromString(serviceIdStr); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr); if (patient != null) { patientSearchDal.update(serviceId, patient); //find episode of care List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, null, patientId, ResourceType.EpisodeOfCare.toString()); for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); patientSearchDal.update(serviceId, episodeOfCare); } } } String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';"; entityManager = ConnectionManager.getEdsEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); if (i % 5000 == 0) { LOG.info("Done " + (i+1) + " of " + patientIds.size()); } } entityManager.close(); LOG.info("Finished Populating New Search Table"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) { LOG.info("Creating Barts Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds); LOG.info("Finished Creating Barts Subset"); } catch (Throwable t) { LOG.error("", t); } } /*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { for (File sourceFile: sourceDir.listFiles()) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } LOG.info("Doing dir " + sourceFile); createBartsSubsetForFile(sourceFile, destFile, personIds); } else { //we have some bad partial files in, so ignore them String ext = FilenameUtils.getExtension(name); if (ext.equalsIgnoreCase("filepart")) { continue; } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String baseName = 
FilenameUtils.getBaseName(name); String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex ++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i=0; i<expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if (personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception { ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange: exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile fileObj : files) { String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length()+1); String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage); File sourceFile = new File(sourceFilePath); String destFilePath = fileObj.getPath(); File destFile = new File(destFilePath); File destDir = destFile.getParentFile(); if (!destDir.exists()) { destDir.mkdirs(); } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String fileType = fileObj.getType(); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int 
personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i = 0; i < expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if (personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } } private static void copyFile(File src, File dst) throws Exception { FileInputStream fis = new FileInputStream(src); BufferedInputStream bis = new BufferedInputStream(fis); Files.copy(bis, dst.toPath()); bis.close(); } private static boolean isCerner22File(String fileType) throws Exception { if (fileType.equalsIgnoreCase("PPATI") || fileType.equalsIgnoreCase("PPREL") || fileType.equalsIgnoreCase("CDSEV") || fileType.equalsIgnoreCase("PPATH") || fileType.equalsIgnoreCase("RTTPE") || fileType.equalsIgnoreCase("AEATT") || fileType.equalsIgnoreCase("AEINV") || fileType.equalsIgnoreCase("AETRE") || fileType.equalsIgnoreCase("OPREF") || fileType.equalsIgnoreCase("OPATT") || fileType.equalsIgnoreCase("EALEN") || fileType.equalsIgnoreCase("EALSU") || fileType.equalsIgnoreCase("EALOF") || fileType.equalsIgnoreCase("HPSSP") || fileType.equalsIgnoreCase("IPEPI") || fileType.equalsIgnoreCase("IPWDS") || fileType.equalsIgnoreCase("DELIV") || fileType.equalsIgnoreCase("BIRTH") || fileType.equalsIgnoreCase("SCHAC") || fileType.equalsIgnoreCase("APPSL") || fileType.equalsIgnoreCase("DIAGN") || fileType.equalsIgnoreCase("PROCE") || fileType.equalsIgnoreCase("ORDER") || fileType.equalsIgnoreCase("DOCRP") || fileType.equalsIgnoreCase("DOCREF") || fileType.equalsIgnoreCase("CNTRQ") || fileType.equalsIgnoreCase("LETRS") || fileType.equalsIgnoreCase("LOREF") || fileType.equalsIgnoreCase("ORGREF") || fileType.equalsIgnoreCase("PRSNLREF") || fileType.equalsIgnoreCase("CVREF") || fileType.equalsIgnoreCase("NOMREF") || fileType.equalsIgnoreCase("EALIP") || fileType.equalsIgnoreCase("CLEVE") || fileType.equalsIgnoreCase("ENCNT") || fileType.equalsIgnoreCase("RESREF") || fileType.equalsIgnoreCase("PPNAM") || fileType.equalsIgnoreCase("PPADD") || fileType.equalsIgnoreCase("PPPHO") || 
fileType.equalsIgnoreCase("PPALI") || fileType.equalsIgnoreCase("PPINF") || fileType.equalsIgnoreCase("PPAGP") || fileType.equalsIgnoreCase("SURCC") || fileType.equalsIgnoreCase("SURCP") || fileType.equalsIgnoreCase("SURCA") || fileType.equalsIgnoreCase("SURCD") || fileType.equalsIgnoreCase("PDRES") || fileType.equalsIgnoreCase("PDREF") || fileType.equalsIgnoreCase("ABREF") || fileType.equalsIgnoreCase("CEPRS") || fileType.equalsIgnoreCase("ORDDT") || fileType.equalsIgnoreCase("STATREF") || fileType.equalsIgnoreCase("STATA") || fileType.equalsIgnoreCase("ENCINF") || fileType.equalsIgnoreCase("SCHDETAIL") || fileType.equalsIgnoreCase("SCHOFFER") || fileType.equalsIgnoreCase("PPGPORG") || fileType.equalsIgnoreCase("FAMILYHISTORY")) { return true; } else { return false; } } /*private static void fixSubscriberDbs() { LOG.info("Fixing Subscriber DBs"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches"); List<UUID> batchIds = new ArrayList<>(); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); batchIds.add(batchId); } String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } LOG.info("Finished Fixing Subscriber DBs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixReferralRequests() { LOG.info("Fixing Referral 
Requests"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; Set<UUID> patientIdsToPost = new HashSet<>(); for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper wrapper: wrappers) { String resourceType = wrapper.getResourceType(); if (!resourceType.equals(ResourceType.ReferralRequest.toString()) || wrapper.isDeleted()) { continue; } String json = wrapper.getResourceData(); ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json); *//*if (!referral.hasServiceRequested()) { continue; } CodeableConcept reason = referral.getServiceRequested().get(0); referral.setReason(reason); referral.getServiceRequested().clear();*//* if (!referral.hasReason()) { continue; } CodeableConcept reason = referral.getReason(); referral.setReason(null); referral.addServiceRequested(reason); json = FhirSerializationHelper.serializeResource(referral); wrapper.setResourceData(json); saveResourceWrapper(serviceId, wrapper); //add to the set of patients we know need sending on to the protocol queue patientIdsToPost.add(patientId); LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId); } //if our patient has just been fixed or was fixed before, post onto the protocol queue if (patientIdsToPost.contains(patientId)) { List<UUID> batchIds = new ArrayList<>(); batchIds.add(batchId); String 
batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } } } LOG.info("Finished Fixing Referral Requests"); } catch (Throwable t) { LOG.error("", t); } }*/ private static void applyEmisAdminCaches() { LOG.info("Applying Emis Admin Caches"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) { LOG.info(" Service not started, so skipping"); continue; } //get exchanges List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); if (exchangeIds.isEmpty()) { LOG.info(" No exchanges found, so skipping"); continue; } UUID firstExchangeId = exchangeIds.get(0); List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId); boolean appliedAdminCache = false; for (ExchangeEvent event: events) { if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) { appliedAdminCache = true; } } if (appliedAdminCache) { LOG.info(" Have already applied admin cache, so skipping"); continue; } Exchange exchange = exchangeDal.getExchange(firstExchangeId); String body = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(body); if (files.length == 0) { LOG.info(" No files in exchange " + firstExchangeId + " so skipping"); continue; } String firstFilePath = files[0]; String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath); } String sharingAgreementGuid = toks[4]; List<UUID> batchIds = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds); EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(), fhirResourceFiler.getExchangeId(), sharingAgreementGuid, true); ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(endpointSystemId); transformAudit.setExchangeId(firstExchangeId); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); LOG.info(" Going to apply admin resource cache"); csvHelper.applyAdminResourceCache(fhirResourceFiler); 
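                    /* Illustrative sketch only, not called by this class: the sharing agreement GUID lookup a few
                       lines above assumes the Emis CSV naming convention of five underscore-separated tokens,
                       e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv
                       (example name taken from the comment in findEmisStartDates below). The helper name is
                       hypothetical and simply restates that parsing step in isolation:

                    private static String findSharingAgreementGuid(String filePath) throws TransformException {
                        //strip the directory and extension, leaving <prefix>_<domain>_<name>_<dateTime>_<agreementGuid>
                        String name = FilenameUtils.getBaseName(filePath);
                        String[] toks = name.split("_");
                        if (toks.length != 5) {
                            throw new TransformException("Failed to extract data sharing agreement GUID from filename " + filePath);
                        }
                        //the agreement GUID is always the last token
                        return toks[4];
                    }
                    */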
fhirResourceFiler.waitToFinish(); for (UUID batchId: batchIds) { LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIds.size())); boolean hadError = false; if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); hadError = true; } exchangeDal.save(transformAudit); //clear down the cache of reference mappings since they won't be of much use for the next Exchange IdHelper.clearCache(); if (hadError) { LOG.error(" <<<<<<Error applying resource cache!"); continue; } //add the event to say we've applied the cache AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache"); //post that ONE new batch ID onto the protocol queue String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Applying Emis Admin Caches"); } catch (Throwable t) { LOG.error("", t); } } /*private static void fixBartsEscapedFiles(String filePath) { LOG.info("Fixing Barts Escaped Files in " + filePath); try { fixBartsEscapedFilesInDir(new File(filePath)); LOG.info("Finished fixing Barts Escaped Files in " + filePath); } catch (Throwable t) { LOG.error("", t); } } /** * fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by * replacing the "delete" extracts with newly generated deltas that can be processed * before the re-bulk is done */ private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) { LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName()); /*File tempDirLast = new File(tempDir, "last"); if (!tempDirLast.exists()) { if (!tempDirLast.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirLast); } tempDirLast.mkdirs(); } File tempDirEmpty = new File(tempDir, "empty"); if (!tempDirEmpty.exists()) { if (!tempDirEmpty.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirEmpty); } tempDirEmpty.mkdirs(); }*/ String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode); File f = new File(tempDir); if (f.exists()) { FileUtils.deleteDirectory(f); } UUID serviceUuid = service.getId(); UUID systemUuid = UUID.fromString(systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); //get all the exchanges, which are returned in reverse order, so reverse for simplicity List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>(); Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>(); //reverse the exchange list and cache the files for each one List<Exchange> exchanges = new ArrayList<>(); for (int i=exchangesDesc.size()-1; i>=0; i--) { Exchange exchange = exchangesDesc.get(i); String exchangeBody = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); //drop 
out and ignore any exchanges containing the singular bespoke reg status files if (files.length <= 1) { continue; } //drop out and ignore any exchanges for the left and dead extracts, since we don't //expect to receive re-bulked data for the dead patients String firstFile = files[0]; if (firstFile.indexOf("LEFT_AND_DEAD") > -1) { continue; } exchanges.add(exchange); //populate the map of the files with the shared storage prefix List<String> fileList = Lists.newArrayList(files); hmExchangeFiles.put(exchange, fileList); //populate a map of the same files without the prefix files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (int j=0; j<files.length; j++) { String file = files[j].substring(sharedStoragePath.length() + 1); files[j] = file; } fileList = Lists.newArrayList(files); hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList); } /*exchanges.sort((o1, o2) -> { Date d1 = o1.getTimestamp(); Date d2 = o2.getTimestamp(); return d1.compareTo(d2); });*/ LOG.info("Found " + exchanges.size() + " exchanges and cached their files"); int indexDisabled = -1; int indexRebulked = -1; int indexOriginallyBulked = -1; //go back through them to find the extract where the re-bulk is and when it was disabled for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { indexDisabled = i; } else { if (indexDisabled == -1) { indexRebulked = i; } else { //if we've found a non-disabled extract older than the disabled ones, //then we've gone far enough back break; } } } //go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled) for (int i=indexDisabled-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { break; } indexOriginallyBulked = i; } if (indexDisabled == -1 || indexRebulked == -1 || indexOriginallyBulked == -1) { throw new Exception("Failed to find exchanges for disabling (" + indexDisabled + "), re-bulking (" + indexRebulked + ") or original bulk (" + indexOriginallyBulked + ")"); } Exchange exchangeDisabled = exchanges.get(indexDisabled); LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId()); Exchange exchangeRebulked = exchanges.get(indexRebulked); LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId()); Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked); LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId()); //continueOrQuit(); List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked); List<String> tempFilesCreated = new ArrayList<>(); Set<String> patientGuidsDeletedOrTooOld = new HashSet<>(); for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if (!isPatientFile(fileType)) { continue; } LOG.info("Doing " + fileType); String guidColumnName = getGuidColumnName(fileType); //find all the guids in the re-bulk Set<String> idsInRebulk = new HashSet<>(); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); String[] headers = null; try { headers = CsvHelper.getHeaderMapAsArray(csvParser); 
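                /* Sketch of the de-duplication key used in this method (hypothetical helper, shown for clarity only
                   and not referenced by the code below): both the re-bulk scan below and the later pass over the
                   pre-disable extracts identify a row by its PatientGuid plus, for non-patient files, the row GUID
                   column returned by getGuidColumnName(..):

                private static String buildUniqueId(CSVRecord record, String guidColumnName) {
                    //the patient file is keyed on PatientGuid alone; all other files append their own row GUID
                    String id = record.get("PatientGuid");
                    if (!Strings.isNullOrEmpty(guidColumnName)) {
                        id += "//" + record.get(guidColumnName);
                    }
                    return id;
                }
                */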
Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); //get the patient and row guid out of the file and cache in our set String id = record.get("PatientGuid"); if (!Strings.isNullOrEmpty(guidColumnName)) { id += "//" + record.get(guidColumnName); } idsInRebulk.add(id); } } finally { csvParser.close(); } LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile); //create a replacement file for the exchange the service was disabled String replacementDisabledFile = null; List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled); for (String s: disabledFiles) { String disabledFileType = findFileType(s); if (disabledFileType.equals(fileType)) { replacementDisabledFile = FilenameUtils.concat(tempDir, s); File dir = new File(replacementDisabledFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } tempFilesCreated.add(s); LOG.info("Created replacement file " + replacementDisabledFile); } } FileWriter fileWriter = new FileWriter(replacementDisabledFile); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); Set<String> pastIdsProcessed = new HashSet<>(); //now go through all files of the same type PRIOR to the service was disabled //to find any rows that we'll need to explicitly delete because they were deleted while //the extract was disabled for (int i=indexDisabled-1; i>=indexOriginallyBulked; i--) { Exchange exchange = exchanges.get(i); String originalFile = null; List<String> files = hmExchangeFiles.get(exchange); for (String s: files) { String originalFileType = findFileType(s); if (originalFileType.equals(fileType)) { originalFile = s; break; } } if (originalFile == null) { continue; } LOG.info(" Reading " + originalFile); reader = FileHelper.readFileReaderFromSharedStorage(originalFile); csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientGuid = record.get("PatientGuid"); //get the patient and row guid out of the file and cache in our set String uniqueId = patientGuid; if (!Strings.isNullOrEmpty(guidColumnName)) { uniqueId += "//" + record.get(guidColumnName); } //if we're already handled this record in a more recent extract, then skip it if (pastIdsProcessed.contains(uniqueId)) { continue; } pastIdsProcessed.add(uniqueId); //if this ID isn't deleted and isn't in the re-bulk then it means //it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted //from Emis Web while the extract feed was disabled //if the record is deleted, then we won't expect it in the re-bulk boolean deleted = Boolean.parseBoolean(record.get("Deleted")); if (deleted) { //if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes if (fileType.equals("Admin_Patient")) { patientGuidsDeletedOrTooOld.add(patientGuid); } continue; } //if it's not the patient file and we refer to a patient that we know //has been deleted, then skip this row, since we know we're deleting the entire patient record if (patientGuidsDeletedOrTooOld.contains(patientGuid)) { continue; } //if the re-bulk contains a record matching this one, then it's OK if (idsInRebulk.contains(uniqueId)) { continue; } 
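                            /* Summary sketch (hypothetical helper, not used by this method): duplicates across
                               extracts have already been filtered out via pastIdsProcessed, and Admin_Patient rows
                               get an extra deceased/deducted check below; ignoring those two steps, the chain of
                               'continue' statements above means a synthetic delete is only generated for a row that
                               is not itself deleted, does not belong to a patient already known to be deleted (or
                               too old), and has no matching row in the re-bulk:

                            private static boolean needsSyntheticDelete(boolean deletedInSource, String patientGuid,
                                    String uniqueId, Set<String> patientGuidsDeletedOrTooOld, Set<String> idsInRebulk) {
                                return !deletedInSource
                                        && !patientGuidsDeletedOrTooOld.contains(patientGuid)
                                        && !idsInRebulk.contains(uniqueId);
                            }
                            */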
//the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago), //so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped if (fileType.equals("Admin_Patient")) { //retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid); if (patientUuid == null) { throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]"); } Patient patientResource = (Patient)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString()); if (patientResource.hasDeceased()) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too EpisodeOfCare episodeResource = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString()); if (episodeResource.hasPeriod() && !PeriodHelper.isActive(episodeResource.getPeriod())) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } } //create a new CSV record, carrying over the GUIDs from the original but marking as deleted String[] newRecord = new String[headers.length]; for (int j=0; j<newRecord.length; j++) { String header = headers[j]; if (header.equals("PatientGuid") || header.equals("OrganisationGuid") || (!Strings.isNullOrEmpty(guidColumnName) && header.equals(guidColumnName))) { String val = record.get(header); newRecord[j] = val; } else if (header.equals("Deleted")) { newRecord[j] = "true"; } else { newRecord[j] = ""; } } csvPrinter.printRecord((Object[])newRecord); csvPrinter.flush(); //log out the raw record that's missing from the original StringBuffer sb = new StringBuffer(); sb.append("Record not in re-bulk: "); for (int j=0; j<record.size(); j++) { if (j > 0) { sb.append(","); } sb.append(record.get(j)); } LOG.info(sb.toString()); } } finally { csvParser.close(); } } csvPrinter.flush(); csvPrinter.close(); //also create a version of the CSV file with just the header and nothing else in for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s: exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals(fileType)) { String emptyTempFile = FilenameUtils.concat(tempDir, s); File dir = new File(emptyTempFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } fileWriter = new FileWriter(emptyTempFile); bufferedWriter = new BufferedWriter(fileWriter); csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); csvPrinter.close(); tempFilesCreated.add(s); LOG.info("Created empty file " + emptyTempFile); } } } } //we also need to copy the restored sharing agreement file to replace all the period it was disabled String rebulkedSharingAgreementFile = null; for (String s: rebulkFiles) { String fileType = findFileType(s); if (fileType.equals("Agreements_SharingOrganisation")) { rebulkedSharingAgreementFile = s; } } for (int i=indexDisabled; i<indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = 
hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s: exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals("Agreements_SharingOrganisation")) { String replacementFile = FilenameUtils.concat(tempDir, s); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile); File replacementFileObj = new File(replacementFile); Files.copy(inputStream, replacementFileObj.toPath()); inputStream.close(); tempFilesCreated.add(s); } } } //create a script to copy the files into S3 List<String> copyScript = new ArrayList<>(); copyScript.add("#!/bin/bash"); copyScript.add(""); for (String s: tempFilesCreated) { String localFile = FilenameUtils.concat(tempDir, s); copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s); } String scriptFile = FilenameUtils.concat(tempDir, "copy.sh"); FileUtils.writeLines(new File(scriptFile), copyScript); LOG.info("Finished - written files to " + tempDir); dumpFileSizes(new File(tempDir)); /*continueOrQuit(); //back up every file where the service was disabled for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { //first download from S3 to the local temp dir InputStream inputStream = FileHelper.readFileFromSharedStorage(file); String fileName = FilenameUtils.getName(file); String tempPath = FilenameUtils.concat(tempDir, fileName); File downloadDestination = new File(tempPath); Files.copy(inputStream, downloadDestination.toPath()); //then write back to S3 in a sub-dir of the original file String backupPath = FilenameUtils.getPath(file); backupPath = FilenameUtils.concat(backupPath, "Original"); backupPath = FilenameUtils.concat(backupPath, fileName); FileHelper.writeFileToSharedStorage(backupPath, downloadDestination); LOG.info("Backed up " + file + " -> " + backupPath); //delete from temp dir downloadDestination.delete(); } } continueOrQuit(); //copy the new CSV files into the dir where it was disabled List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled); for (String disabledFile: disabledFiles) { String fileType = findFileType(disabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected temp file " + f); } FileHelper.writeFileToSharedStorage(disabledFile, f); LOG.info("Copied " + tempFile + " -> " + disabledFile); } continueOrQuit(); //empty the patient files for any extracts while the service was disabled for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange otherExchangeDisabled = exchanges.get(i); List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled); for (String otherDisabledFile: otherDisabledFiles) { String fileType = findFileType(otherDisabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(otherDisabledFile, f); LOG.info("Copied " + tempFile + " -> " + otherDisabledFile); } } continueOrQuit(); //copy the content of the sharing agreement file from when it was re-bulked for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if 
(fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File downloadDestination = new File(tempFile); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile); Files.copy(inputStream, downloadDestination.toPath()); tempFilesCreated.add(tempFile); } } //replace the sharing agreement file for all disabled extracts with the non-disabled one for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(file, f); LOG.info("Copied " + tempFile + " -> " + file); } } } LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId); continueOrQuit(); for (String tempFileCreated: tempFilesCreated) { File f = new File(tempFileCreated); if (f.exists()) { f.delete(); } }*/ } catch (Exception ex) { LOG.error("", ex); } } private static void dumpFileSizes(File f) { if (f.isDirectory()) { for (File child: f.listFiles()) { dumpFileSizes(child); } } else { String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length()); LOG.info("" + f + " = " + totalSizeReadable); } } private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception { List<String> files = fileMap.get(exchange); String file = findSharingAgreementFile(files); String name = FilenameUtils.getBaseName(file); String[] toks = name.split("_"); return toks[3]; } private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception { String file = findSharingAgreementFile(files); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); CSVRecord record = iterator.next(); String s = record.get("Disabled"); boolean disabled = Boolean.parseBoolean(s); return disabled; } finally { csvParser.close(); } } private static void continueOrQuit() throws Exception { LOG.info("Enter y to continue, anything else to quit"); byte[] bytes = new byte[10]; System.in.read(bytes); char c = (char)bytes[0]; if (c != 'y' && c != 'Y') { System.out.println("Read " + c); System.exit(1); } } private static String getGuidColumnName(String fileType) { if (fileType.equals("Admin_Patient")) { //patient file just has patient GUID, nothing extra return null; } else if (fileType.equals("CareRecord_Consultation")) { return "ConsultationGuid"; } else if (fileType.equals("CareRecord_Diary")) { return "DiaryGuid"; } else if (fileType.equals("CareRecord_Observation")) { return "ObservationGuid"; } else if (fileType.equals("CareRecord_Problem")) { //there is no separate problem GUID, as it's just a modified observation return "ObservationGuid"; } else if (fileType.equals("Prescribing_DrugRecord")) { return "DrugRecordGuid"; } else if (fileType.equals("Prescribing_IssueRecord")) { return "IssueRecordGuid"; } else { throw new IllegalArgumentException(fileType); } } private static String findFileType(String filePath) { String fileName = FilenameUtils.getName(filePath); String[] toks = fileName.split("_"); String domain = toks[1]; String 
name = toks[2];
        return domain + "_" + name;
    }

    private static boolean isPatientFile(String fileType) {
        if (fileType.equals("Admin_Patient")
                || fileType.equals("CareRecord_Consultation")
                || fileType.equals("CareRecord_Diary")
                || fileType.equals("CareRecord_Observation")
                || fileType.equals("CareRecord_Problem")
                || fileType.equals("Prescribing_DrugRecord")
                || fileType.equals("Prescribing_IssueRecord")) {
            //note the referral file doesn't have a Deleted column, so isn't in this list
            return true;
        } else {
            return false;
        }
    }

    private static String findSharingAgreementFile(List<String> files) throws Exception {
        for (String file : files) {
            String fileType = findFileType(file);
            if (fileType.equals("Agreements_SharingOrganisation")) {
                return file;
            }
        }
        throw new Exception("Failed to find sharing agreement file in " + files.get(0));
    }

    private static void testSlack() {
        LOG.info("Testing slack");
        try {
            SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader");
            LOG.info("Finished testing slack");
        } catch (Exception ex) {
            LOG.error("", ex);
        }
    }

    /*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {
        try {
            ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
            ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
            Service service = serviceDalI.getById(serviceId);
            LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);
            FileReader fr = new FileReader(filePath);
            BufferedReader br = new BufferedReader(fr);
            int count = 0;
            List<UUID> exchangeIdBatch = new ArrayList<>();
            while (true) {
                String line = br.readLine();
                if (line == null) {
                    break;
                }
                UUID exchangeId = UUID.fromString(line);
                //update the transform audit, so EDS UI knows we've re-queued this exchange
                ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
                if (audit != null && !audit.isResubmitted()) {
                    audit.setResubmitted(true);
                    auditRepository.save(audit);
                }
                count ++;
                exchangeIdBatch.add(exchangeId);
                if (exchangeIdBatch.size() >= 1000) {
                    QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
                    exchangeIdBatch = new ArrayList<>();
                    LOG.info("Done " + count);
                }
            }
            if (!exchangeIdBatch.isEmpty()) {
                QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
                LOG.info("Done " + count);
            }
            br.close();
        } catch (Exception ex) {
            LOG.error("", ex);
        }
        LOG.info("Finished Posting to inbound for " + serviceId);
    }*/

    /*private static void postToInbound(UUID serviceId, boolean all) {
        LOG.info("Posting to inbound for " + serviceId);
        try {
            ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
            ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
            Service service = serviceDalI.getById(serviceId);
            List<UUID> systemIds = findSystemIds(service);
            UUID systemId = systemIds.get(0);
            ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);
            for (UUID exchangeId: errorState.getExchangeIdsInError()) {
                //update the transform audit, so EDS UI knows we've re-queued this exchange
                ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
                //skip any exchange IDs we've already re-queued up to be processed again
                if (audit.isResubmitted()) {
                    LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
                    continue;
                }
                LOG.debug("Re-posting " + audit.getExchangeId());
                audit.setResubmitted(true);
                auditRepository.save(audit);
                //then re-submit the exchange to
Rabbit MQ for the queue reader to pick up QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false); if (!all) { LOG.info("Posted first exchange, so stopping"); break; } } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ private static void fixPatientSearchAllServices(String filterSystemId) { LOG.info("Fixing patient search for all services and system " + filterSystemId); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { fixPatientSearch(service.getId().toString(), filterSystemId); } LOG.info("Finished Fixing patient search for all services and system " + filterSystemId); } catch (Throwable t) { LOG.error("", t); } } private static void fixPatientSearch(String serviceId, String filterSystemId) { LOG.info("Fixing patient search for service " + serviceId); try { UUID serviceUuid = UUID.fromString(serviceId); UUID filterSystemUuid = null; if (!Strings.isNullOrEmpty(filterSystemId)) { filterSystemUuid = UUID.fromString(filterSystemId); } ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Set<UUID> patientsDone = new HashSet<>(); Service service = serviceDal.getById(serviceUuid); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { if (filterSystemUuid != null && !filterSystemUuid.equals(systemId)) { continue; } List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId); LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId); for (UUID exchangeId : exchanges) { List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } if (patientsDone.contains(patientId)) { continue; } patientsDone.add(patientId); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId); if (wrapper != null) { String json = wrapper.getResourceData(); if (!Strings.isNullOrEmpty(json)) { Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json); patientSearchDal.update(serviceUuid, fhirPatient); } } if (patientsDone.size() % 1000 == 0) { LOG.info("Done " + patientsDone.size()); } } } } LOG.info("Done " + patientsDone.size()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished fixing patient search for " + serviceId); } private static void runSql(String host, String username, String password, String sqlFile) { LOG.info("Running SQL on " + host + " from " + sqlFile); Connection conn = null; Statement statement = null; try { File f = new File(sqlFile); if (!f.exists()) { LOG.error("" + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); /*String combined = String.join("\n", lines); LOG.info("Going to run SQL"); LOG.info(combined);*/ //load driver Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", username); props.setProperty("password", password); conn = DriverManager.getConnection(host, props); LOG.info("Opened 
connection"); statement = conn.createStatement(); long totalStart = System.currentTimeMillis(); for (String sql: lines) { sql = sql.trim(); if (sql.startsWith("--") || sql.startsWith("/*") || Strings.isNullOrEmpty(sql)) { continue; } LOG.info(""); LOG.info(sql); long start = System.currentTimeMillis(); boolean hasResultSet = statement.execute(sql); long end = System.currentTimeMillis(); LOG.info("SQL took " + (end - start) + "ms"); if (hasResultSet) { while (true) { ResultSet rs = statement.getResultSet(); int cols = rs.getMetaData().getColumnCount(); List<String> colHeaders = new ArrayList<>(); for (int i = 0; i < cols; i++) { String header = rs.getMetaData().getColumnName(i + 1); colHeaders.add(header); } String colHeaderStr = String.join(", ", colHeaders); LOG.info(colHeaderStr); while (rs.next()) { List<String> row = new ArrayList<>(); for (int i = 0; i < cols; i++) { Object o = rs.getObject(i + 1); if (rs.wasNull()) { row.add("<null>"); } else { row.add(o.toString()); } } String rowStr = String.join(", ", row); LOG.info(rowStr); } if (!statement.getMoreResults()) { break; } } } else { int updateCount = statement.getUpdateCount(); LOG.info("Updated " + updateCount + " Row(s)"); } } long totalEnd = System.currentTimeMillis(); LOG.info(""); LOG.info("Total time taken " + (totalEnd - totalStart) + "ms"); } catch (Throwable t) { LOG.error("", t); } finally { if (statement != null) { try { statement.close(); } catch (Exception ex) { } } if (conn != null) { try { conn.close(); } catch (Exception ex) { } } LOG.info("Closed connection"); } LOG.info("Finished Testing DB Size Limit"); } /*private static void fixExchangeBatches() { LOG.info("Starting Fixing Exchange Batches"); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); List<Service> services = serviceDalI.getAll(); for (Service service: services) { LOG.info("Doing " + service.getName()); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId()); for (UUID exchangeId: exchangeIds) { LOG.info(" Exchange " + exchangeId); List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId()); if (resources.isEmpty()) { continue; } ResourceWrapper first = resources.get(0); UUID patientId = first.getPatientId(); if (patientId != null) { exchangeBatch.setEdsPatientId(patientId); exchangeBatchDalI.save(exchangeBatch); LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId()); } } } } LOG.info("Finished Fixing Exchange Batches"); } catch (Exception ex) { LOG.error("", ex); } }*/ /** * exports ADT Encounters for patients based on a CSV file produced using the below SQL --USE EDS DATABASE -- barts b5a08769-cbbe-4093-93d6-b696cd1da483 -- homerton 962d6a9a-5950-47ac-9e16-ebee56f9507a create table adt_patients ( service_id character(36), system_id character(36), nhs_number character varying(10), patient_id character(36) ); -- delete from adt_patients; select * from patient_search limit 10; select * from patient_link limit 10; insert into adt_patients select distinct ps.service_id, ps.system_id, ps.nhs_number, ps.patient_id from patient_search ps join 
patient_link pl on pl.patient_id = ps.patient_id join patient_link pl2 on pl.person_id = pl2.person_id join patient_search ps2 on ps2.patient_id = pl2.patient_id where ps.service_id IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a') and ps2.service_id NOT IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a'); select count(1) from adt_patients limit 100; select * from adt_patients limit 100; ---MOVE TABLE TO HL7 RECEIVER DB select count(1) from adt_patients; -- top 1000 patients with messages select * from mapping.resource_uuid where resource_type = 'Patient' limit 10; select * from log.message limit 10; create table adt_patient_counts ( nhs_number character varying(100), count int ); insert into adt_patient_counts select pid1, count(1) from log.message where pid1 is not null and pid1 <> '' group by pid1; select * from adt_patient_counts order by count desc limit 100; alter table adt_patients add count int; update adt_patients set count = adt_patient_counts.count from adt_patient_counts where adt_patients.nhs_number = adt_patient_counts.nhs_number; select count(1) from adt_patients where nhs_number is null; select * from adt_patients where nhs_number is not null and count is not null order by count desc limit 1000; */ /*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) { LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath); try { File sourceFile = new File(sourceCsvPath); CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); //"service_id","system_id","nhs_number","patient_id","count" int count = 0; HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>(); HashMap<UUID, Integer> patientIds = new HashMap<>(); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); count ++; String serviceId = csvRecord.get("service_id"); String systemId = csvRecord.get("system_id"); String patientId = csvRecord.get("patient_id"); UUID serviceUuid = UUID.fromString(serviceId); List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid); if (systemIds == null) { systemIds = new ArrayList<>(); serviceAndSystemIds.put(serviceUuid, systemIds); } systemIds.add(UUID.fromString(systemId)); patientIds.put(UUID.fromString(patientId), new Integer(count)); } csvParser.close(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ParserPool parser = new ParserPool(); Map<Integer, List<Object[]>> patientRows = new HashMap<>(); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); for (UUID serviceId: serviceAndSystemIds.keySet()) { //List<UUID> systemIds = serviceAndSystemIds.get(serviceId); Service service = serviceDalI.getById(serviceId); String serviceName = service.getName(); LOG.info("Doing service " + serviceId + " " + serviceName); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId); LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan"); int exchangeCount = 0; for (UUID exchangeId: exchangeIds) { exchangeCount ++; if (exchangeCount % 1000 == 0) { LOG.info("Done " + exchangeCount + " exchanges"); } List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch 
exchangeBatch: exchangeBatches) { UUID patientId = exchangeBatch.getEdsPatientId(); if (patientId != null && !patientIds.containsKey(patientId)) { continue; } Integer patientIdInt = patientIds.get(patientId); //get encounters for exchange batch UUID batchId = exchangeBatch.getBatchId(); List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper resourceWrapper: resourceWrappers) { if (resourceWrapper.isDeleted()) { continue; } String resourceType = resourceWrapper.getResourceType(); if (!resourceType.equals(ResourceType.Encounter.toString())) { continue; } LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId()); String json = resourceWrapper.getResourceData(); Encounter fhirEncounter = (Encounter)parser.parse(json); Date date = null; if (fhirEncounter.hasPeriod()) { Period period = fhirEncounter.getPeriod(); if (period.hasStart()) { date = period.getStart(); } } String episodeId = null; if (fhirEncounter.hasEpisodeOfCare()) { Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference); EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirEpisode != null) { if (fhirEpisode.hasIdentifier()) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID); if (Strings.isNullOrEmpty(episodeId)) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID); } } } } String adtType = null; String adtCode = null; Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE); if (extension != null) { CodeableConcept codeableConcept = (CodeableConcept) extension.getValue(); Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE); if (hl7MessageTypeCoding != null) { adtType = hl7MessageTypeCoding.getDisplay(); adtCode = hl7MessageTypeCoding.getCode(); } } else { //for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body try { Exchange exchange = exchangeDalI.getExchange(exchangeId); String exchangeBody = exchange.getBody(); Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody); for (Bundle.BundleEntryComponent entry: bundle.getEntry()) { if (entry.getResource() != null && entry.getResource() instanceof MessageHeader) { MessageHeader header = (MessageHeader)entry.getResource(); if (header.hasEvent()) { Coding coding = header.getEvent(); adtType = coding.getDisplay(); adtCode = coding.getCode(); } } } } catch (Exception ex) { //if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them } } String cls = null; if (fhirEncounter.hasClass_()) { Encounter.EncounterClass encounterClass = fhirEncounter.getClass_(); if (encounterClass == Encounter.EncounterClass.OTHER && fhirEncounter.hasClass_Element() && fhirEncounter.getClass_Element().hasExtension()) { for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) { if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) { //not 100% of the type of the value, so just append to a String cls = "" + classExtension.getValue(); } } } if (Strings.isNullOrEmpty(cls)) { cls = 
encounterClass.toCode(); } } String type = null; if (fhirEncounter.hasType()) { //only seem to ever have one type CodeableConcept codeableConcept = fhirEncounter.getType().get(0); type = codeableConcept.getText(); } String status = null; if (fhirEncounter.hasStatus()) { Encounter.EncounterState encounterState = fhirEncounter.getStatus(); status = encounterState.toCode(); } String location = null; String locationType = null; if (fhirEncounter.hasLocation()) { //first location is always the current location Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0); if (encounterLocation.hasLocation()) { Reference locationReference = encounterLocation.getLocation(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference); Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirLocation != null) { if (fhirLocation.hasName()) { location = fhirLocation.getName(); } if (fhirLocation.hasType()) { CodeableConcept typeCodeableConcept = fhirLocation.getType(); if (typeCodeableConcept.hasCoding()) { Coding coding = typeCodeableConcept.getCoding().get(0); locationType = coding.getDisplay(); } } } } } String clinician = null; if (fhirEncounter.hasParticipant()) { //first participant seems to be the interesting one Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0); if (encounterParticipant.hasIndividual()) { Reference practitionerReference = encounterParticipant.getIndividual(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference); Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirPractitioner != null) { if (fhirPractitioner.hasName()) { HumanName name = fhirPractitioner.getName(); clinician = name.getText(); if (Strings.isNullOrEmpty(clinician)) { clinician = ""; for (StringType s: name.getPrefix()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getGiven()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getFamily()) { clinician += s.getValueNotNull(); clinician += " "; } clinician = clinician.trim(); } } } } } Object[] row = new Object[12]; row[0] = serviceName; row[1] = patientIdInt.toString(); row[2] = sdfOutput.format(date); row[3] = episodeId; row[4] = adtCode; row[5] = adtType; row[6] = cls; row[7] = type; row[8] = status; row[9] = location; row[10] = locationType; row[11] = clinician; List<Object[]> rows = patientRows.get(patientIdInt); if (rows == null) { rows = new ArrayList<>(); patientRows.put(patientIdInt, rows); } rows.add(row); } } } } String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"}; FileWriter fileWriter = new FileWriter(outputPath); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader(outputColumnHeaders) .withQuote('"'); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format); for (int i=0; i <= count; i++) { Integer patientIdInt = new Integer(i); List<Object[]> rows = patientRows.get(patientIdInt); if (rows != null) { for (Object[] row: rows) { csvPrinter.printRecord(row); } } } csvPrinter.close(); bufferedWriter.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished 
Exporting Encounters from " + sourceCsvPath + " to " + outputPath); }*/ /*private static void registerShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { LOG.info(""); try { Thread.sleep(5000); } catch (Throwable ex) { LOG.error("", ex); } LOG.info("Done"); } }); }*/ private static void findEmisStartDates(String path, String outputPath) { LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss"); Map<String, Date> startDates = new HashMap<>(); Map<String, String> servers = new HashMap<>(); Map<String, String> names = new HashMap<>(); Map<String, String> odsCodes = new HashMap<>(); Map<String, String> cdbNumbers = new HashMap<>(); Map<String, Set<String>> distinctPatients = new HashMap<>(); File root = new File(path); for (File sftpRoot: root.listFiles()) { LOG.info("Checking " + sftpRoot); Map<Date, File> extracts = new HashMap<>(); List<Date> extractDates = new ArrayList<>(); for (File extractRoot: sftpRoot.listFiles()) { Date d = sdf.parse(extractRoot.getName()); //LOG.info("" + extractRoot.getName() + " -> " + d); extracts.put(d, extractRoot); extractDates.add(d); } Collections.sort(extractDates); for (Date extractDate: extractDates) { File extractRoot = extracts.get(extractDate); LOG.info("Checking " + extractRoot); //read the sharing agreements file //e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv File sharingAgreementsFile = null; for (File f: extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("agreements_sharingorganisation") > -1 && name.endsWith(".csv")) { sharingAgreementsFile = f; break; } } if (sharingAgreementsFile == null) { LOG.info("Null agreements file for " + extractRoot); continue; } CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String activated = csvRecord.get("IsActivated"); String disabled = csvRecord.get("Disabled"); servers.put(orgGuid, sftpRoot.getName()); if (activated.equalsIgnoreCase("true")) { if (disabled.equalsIgnoreCase("false")) { Date d = sdf.parse(extractRoot.getName()); Date existingDate = startDates.get(orgGuid); if (existingDate == null) { startDates.put(orgGuid, d); } } else { if (startDates.containsKey(orgGuid)) { startDates.put(orgGuid, null); } } } } } finally { csvParser.close(); } //go through orgs file to get name, ods and cdb codes File orgsFile = null; for (File f: extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_organisation_") > -1 && name.endsWith(".csv")) { orgsFile = f; break; } } csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String name = csvRecord.get("OrganisationName"); String odsCode = csvRecord.get("ODSCode"); String cdb = csvRecord.get("CDB"); names.put(orgGuid, name); odsCodes.put(orgGuid, odsCode); cdbNumbers.put(orgGuid, cdb); } } finally { csvParser.close(); } //go through patients file to get count File patientFile = null; for (File f: extractRoot.listFiles()) { String 
name = f.getName().toLowerCase(); if (name.indexOf("admin_patient_") > -1 && name.endsWith(".csv")) { patientFile = f; break; } } csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String patientGuid = csvRecord.get("PatientGuid"); String deleted = csvRecord.get("Deleted"); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); if (distinctPatientSet == null) { distinctPatientSet = new HashSet<>(); distinctPatients.put(orgGuid, distinctPatientSet); } if (deleted.equalsIgnoreCase("true")) { distinctPatientSet.remove(patientGuid); } else { distinctPatientSet.add(patientGuid); } } } finally { csvParser.close(); } } } SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); StringBuilder sb = new StringBuilder(); sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients"); for (String orgGuid: startDates.keySet()) { Date startDate = startDates.get(orgGuid); String server = servers.get(orgGuid); String name = names.get(orgGuid); String odsCode = odsCodes.get(orgGuid); String cdbNumber = cdbNumbers.get(orgGuid); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); String startDateDesc = null; if (startDate != null) { startDateDesc = sdfOutput.format(startDate); } Long countDistinctPatients = null; if (distinctPatientSet != null) { countDistinctPatients = new Long(distinctPatientSet.size()); } sb.append("\n"); sb.append("\"" + name + "\""); sb.append(","); sb.append("\"" + odsCode + "\""); sb.append(","); sb.append("\"" + cdbNumber + "\""); sb.append(","); sb.append("\"" + orgGuid + "\""); sb.append(","); sb.append(startDateDesc); sb.append(","); sb.append("\"" + server + "\""); sb.append(","); sb.append(countDistinctPatients); } LOG.info(sb.toString()); FileUtils.writeStringToFile(new File(outputPath), sb.toString()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath); } private static void findEncounterTerms(String path, String outputPath) { LOG.info("Finding Encounter Terms from " + path); Map<String, Long> hmResults = new HashMap<>(); //source term, source term snomed ID, source term snomed term - count try { File root = new File(path); File[] files = root.listFiles(); for (File readerRoot: files) { //emis001 LOG.info("Finding terms in " + readerRoot); //first read in all the coding files to build up our map of codes Map<String, String> hmCodes = new HashMap<>(); for (File dateFolder: readerRoot.listFiles()) { LOG.info("Looking for codes in " + dateFolder); File f = findFile(dateFolder, "Coding_ClinicalCode"); if (f == null) { LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String codeId = csvRecord.get("CodeId"); String term = csvRecord.get("Term"); String snomed = csvRecord.get("SnomedCTConceptId"); hmCodes.put(codeId, snomed + ",\"" + term + "\""); } csvParser.close(); } SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); Date cutoff = dateFormat.parse("2017-01-01"); //now process the consultation files themselves for (File dateFolder: readerRoot.listFiles()) { 
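                /* Worked example with made-up values: for a consultation row whose ConsultationSourceTerm is
                   "Telephone consultation" and whose ConsultationSourceCodeId resolves via hmCodes to
                   1234567890,"Telephone encounter", the loop below builds the key
                       "Telephone consultation",1234567890,"Telephone encounter"
                   and increments its tally in hmResults; the null-check-then-put counting idiom is equivalent to
                       hmResults.merge(line, 1L, Long::sum);
                   and the output stage simply appends ,<count> to each key when the results are written out.
                */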
LOG.info("Looking for consultations in " + dateFolder); File f = findFile(dateFolder, "CareRecord_Consultation"); if (f == null) { LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String term = csvRecord.get("ConsultationSourceTerm"); String codeId = csvRecord.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(term) && Strings.isNullOrEmpty(codeId)) { continue; } String date = csvRecord.get("EffectiveDate"); if (Strings.isNullOrEmpty(date)) { continue; } Date d = dateFormat.parse(date); if (d.before(cutoff)) { continue; } String line = "\"" + term + "\","; if (!Strings.isNullOrEmpty(codeId)) { String codeLookup = hmCodes.get(codeId); if (codeLookup == null) { LOG.error("Failed to find lookup for codeID " + codeId); continue; } line += codeLookup; } else { line += ","; } Long count = hmResults.get(line); if (count == null) { count = new Long(1); } else { count = new Long(count.longValue() + 1); } hmResults.put(line, count); } csvParser.close(); } } //save results to file StringBuilder output = new StringBuilder(); output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\""); output.append("\r\n"); for (String line: hmResults.keySet()) { Long count = hmResults.get(line); String combined = line + "," + count; output.append(combined); output.append("\r\n"); } LOG.info("FInished"); LOG.info(output.toString()); FileUtils.writeStringToFile(new File(outputPath), output.toString()); LOG.info("written output to " + outputPath); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished finding Encounter Terms from " + path); } private static File findFile(File root, String token) throws Exception { for (File f: root.listFiles()) { String s = f.getName(); if (s.indexOf(token) > -1) { return f; } } return null; } /*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) { LOG.info("Starting Populating Protocol Queue for " + serviceIdStr); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); if (serviceIdStr.equalsIgnoreCase("All")) { serviceIdStr = null; } try { List<Service> services = new ArrayList<>(); if (Strings.isNullOrEmpty(serviceIdStr)) { services = serviceRepository.getAll(); } else { UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); services.add(service); } for (Service service: services) { List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId()); LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName()); if (startingExchangeId != null) { UUID startingExchangeUuid = UUID.fromString(startingExchangeId); if (exchangeIds.contains(startingExchangeUuid)) { //if in the list, remove everything up to and including the starting exchange int index = exchangeIds.indexOf(startingExchangeUuid); LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point"); for (int i=index; i>=0; i--) { exchangeIds.remove(i); } startingExchangeId = null; } else { //if not in the list, skip all these exchanges LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping"); continue; } } QueueHelper.postToExchange(exchangeIds, 
"edsProtocol", null, true); } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Populating Protocol Queue for " + serviceIdStr); }*/ /*private static void findDeletedOrgs() { LOG.info("Starting finding deleted orgs"); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); List<Service> services = new ArrayList<>(); try { for (Service service: serviceRepository.getAll()) { services.add(service); } } catch (Exception ex) { LOG.error("", ex); } services.sort((o1, o2) -> { String name1 = o1.getName(); String name2 = o2.getName(); return name1.compareToIgnoreCase(name2); }); for (Service service: services) { try { UUID serviceUuid = service.getId(); List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date()); LOG.info("Service: " + service.getName() + " " + service.getLocalId()); if (exchangeByServices.isEmpty()) { LOG.info(" no exchange found!"); continue; } Exchange exchangeByService = exchangeByServices.get(0); UUID exchangeId = exchangeByService.getId(); Exchange exchange = auditRepository.getExchange(exchangeId); Map<String, String> headers = exchange.getHeaders(); String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid); UUID systemUuid = UUID.fromString(systemUuidStr); int batches = countBatches(exchangeId, serviceUuid, systemUuid); LOG.info(" Most recent exchange had " + batches + " batches"); if (batches > 1 && batches < 2000) { continue; } //go back until we find the FIRST exchange where it broke exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date()); for (int i=0; i<exchangeByServices.size(); i++) { exchangeByService = exchangeByServices.get(i); exchangeId = exchangeByService.getId(); batches = countBatches(exchangeId, serviceUuid, systemUuid); exchange = auditRepository.getExchange(exchangeId); Date timestamp = exchange.getTimestamp(); if (batches < 1 || batches > 2000) { LOG.info(" " + timestamp + " had " + batches); } if (batches > 1 && batches < 2000) { LOG.info(" " + timestamp + " had " + batches); break; } } } catch (Exception ex) { LOG.error("", ex); } } LOG.info("Finished finding deleted orgs"); }*/ private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception { int batches = 0; ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { if (audit.getNumberBatchesCreated() != null) { batches += audit.getNumberBatchesCreated(); } } return batches; } /*private static void fixExchanges(UUID justThisService) { LOG.info("Fixing exchanges"); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId : exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } boolean changed = false; String body = exchange.getBody(); String[] files = body.split("\n"); if (files.length == 0) { continue; } 
for (int i=0; i<files.length; i++) { String original = files[i]; //remove /r characters String trimmed = original.trim(); //add the new prefix if (!trimmed.startsWith("sftpreader/EMIS001/")) { trimmed = "sftpreader/EMIS001/" + trimmed; } if (!original.equals(trimmed)) { files[i] = trimmed; changed = true; } } if (changed) { LOG.info("Fixed exchange " + exchangeId); LOG.info(body); body = String.join("\n", files); exchange.setBody(body); AuditWriter.writeExchange(exchange); } } } LOG.info("Fixed exchanges"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void deleteDataForService(UUID serviceId) { Service dbService = new ServiceRepository().getById(serviceId); //the delete will take some time, so do the delete in a separate thread LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId()); FhirDeletionService deletor = new FhirDeletionService(dbService); try { deletor.deleteData(); LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId()); } catch (Exception ex) { LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex); } }*/ /*private static void fixProblems(UUID serviceId, String sharedStoragePath, boolean testMode) { LOG.info("Fixing problems for service " + serviceId); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); List<ExchangeByService> exchangeByServiceList = auditRepository.getExchangesByService(serviceId, Integer.MAX_VALUE); //go backwards as the most recent is first for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); UUID exchangeId = exchangeByService.getExchangeId(); LOG.info("Doing exchange " + exchangeId); EmisCsvHelper helper = null; try { Exchange exchange = AuditWriter.readExchange(exchangeId); String exchangeBody = exchange.getBody(); String[] files = exchangeBody.split(java.lang.System.lineSeparator()); File orgDirectory = validateAndFindCommonDirectory(sharedStoragePath, files); Map<Class, AbstractCsvParser> allParsers = new HashMap<>(); String properVersion = null; String[] versions = new String[]{EmisCsvToFhirTransformer.VERSION_5_0, EmisCsvToFhirTransformer.VERSION_5_1, EmisCsvToFhirTransformer.VERSION_5_3, EmisCsvToFhirTransformer.VERSION_5_4}; for (String version: versions) { try { List<AbstractCsvParser> parsers = new ArrayList<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(Observation.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(DrugRecord.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(IssueRecord.class, orgDirectory, version, true, parsers); for (AbstractCsvParser parser: parsers) { Class cls = parser.getClass(); allParsers.put(cls, parser); } properVersion = version; } catch (Exception ex) { //ignore } } if (allParsers.isEmpty()) { throw new Exception("Failed to open parsers for exchange " + exchangeId + " in folder " + orgDirectory); } UUID systemId = exchange.getHeaderAsUuid(HeaderKeys.SenderSystemUuid); //FhirResourceFiler dummyFiler = new FhirResourceFiler(exchangeId, serviceId, systemId, null, null, 10); if (helper == null) { helper = new EmisCsvHelper(findDataSharingAgreementGuid(new ArrayList<>(allParsers.values()))); } ObservationPreTransformer.transform(properVersion, allParsers, null, helper); 
IssueRecordPreTransformer.transform(properVersion, allParsers, null, helper); DrugRecordPreTransformer.transform(properVersion, allParsers, null, helper); Map<String, List<String>> problemChildren = helper.getProblemChildMap(); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (Map.Entry<String, List<String>> entry : problemChildren.entrySet()) { String patientLocallyUniqueId = entry.getKey().split(":")[0]; UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientLocallyUniqueId); if (edsPatientId == null) { throw new Exception("Failed to find edsPatientId for local Patient ID " + patientLocallyUniqueId + " in exchange " + exchangeId); } //find the batch ID for our patient UUID batchId = null; for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null && exchangeBatch.getEdsPatientId().equals(edsPatientId)) { batchId = exchangeBatch.getBatchId(); break; } } if (batchId == null) { throw new Exception("Failed to find batch ID for eds Patient ID " + edsPatientId + " in exchange " + exchangeId); } //find the EDS ID for our problem UUID edsProblemId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Condition, entry.getKey()); if (edsProblemId == null) { LOG.warn("No edsProblemId found for local ID " + entry.getKey() + " - assume bad data referring to non-existing problem?"); //throw new Exception("Failed to find edsProblemId for local Patient ID " + problemLocallyUniqueId + " in exchange " + exchangeId); } //convert our child IDs to EDS references List<Reference> references = new ArrayList<>(); HashSet<String> contentsSet = new HashSet<>(); contentsSet.addAll(entry.getValue()); for (String referenceValue : contentsSet) { Reference reference = ReferenceHelper.createReference(referenceValue); ReferenceComponents components = ReferenceHelper.getReferenceComponents(reference); String locallyUniqueId = components.getId(); ResourceType resourceType = components.getResourceType(); UUID edsResourceId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); Reference globallyUniqueReference = ReferenceHelper.createReference(resourceType, edsResourceId.toString()); references.add(globallyUniqueReference); } //find the resource for the problem itself ResourceByExchangeBatch problemResourceByExchangeBatch = null; List<ResourceByExchangeBatch> resources = resourceRepository.getResourcesForBatch(batchId, ResourceType.Condition.toString()); for (ResourceByExchangeBatch resourceByExchangeBatch: resources) { if (resourceByExchangeBatch.getResourceId().equals(edsProblemId)) { problemResourceByExchangeBatch = resourceByExchangeBatch; break; } } if (problemResourceByExchangeBatch == null) { throw new Exception("Problem not found for edsProblemId " + edsProblemId + " for exchange " + exchangeId); } if (problemResourceByExchangeBatch.getIsDeleted()) { LOG.warn("Problem " + edsProblemId + " is deleted, so not adding to it for exchange " + exchangeId); continue; } String json = problemResourceByExchangeBatch.getResourceData(); Condition fhirProblem = (Condition)PARSER_POOL.parse(json); //update the problems if (fhirProblem.hasContained()) { if (fhirProblem.getContained().size() > 1) { throw new Exception("Problem " + edsProblemId + " is has " + fhirProblem.getContained().size() + " contained resources for exchange " + exchangeId); } fhirProblem.getContained().clear(); } List_ list = new List_(); list.setId("Items"); fhirProblem.getContained().add(list); 
Extension extension = ExtensionConverter.findExtension(fhirProblem, FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE); if (extension == null) { Reference listReference = ReferenceHelper.createInternalReference("Items"); fhirProblem.addExtension(ExtensionConverter.createExtension(FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE, listReference)); } for (Reference reference : references) { list.addEntry().setItem(reference); } String newJson = FhirSerializationHelper.serializeResource(fhirProblem); if (newJson.equals(json)) { LOG.warn("Skipping edsProblemId " + edsProblemId + " as JSON hasn't changed"); continue; } problemResourceByExchangeBatch.setResourceData(newJson); String resourceType = problemResourceByExchangeBatch.getResourceType(); UUID versionUuid = problemResourceByExchangeBatch.getVersion(); ResourceHistory problemResourceHistory = resourceRepository.getResourceHistoryByKey(edsProblemId, resourceType, versionUuid); problemResourceHistory.setResourceData(newJson); problemResourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); ResourceByService problemResourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType, edsProblemId); if (problemResourceByService.getResourceData() == null) { problemResourceByService = null; LOG.warn("Not updating edsProblemId " + edsProblemId + " for exchange " + exchangeId + " as it's been subsequently delrted"); } else { problemResourceByService.setResourceData(newJson); } //save back to THREE tables if (!testMode) { resourceRepository.save(problemResourceByExchangeBatch); resourceRepository.save(problemResourceHistory); if (problemResourceByService != null) { resourceRepository.save(problemResourceByService); } LOG.info("Fixed edsProblemId " + edsProblemId + " for exchange Id " + exchangeId); } else { LOG.info("Would change edsProblemId " + edsProblemId + " to new JSON"); LOG.info(newJson); } } } catch (Exception ex) { LOG.error("Failed on exchange " + exchangeId, ex); break; } } LOG.info("Finished fixing problems for service " + serviceId); } private static String findDataSharingAgreementGuid(List<AbstractCsvParser> parsers) throws Exception { //we need a file name to work out the data sharing agreement ID, so just the first file we can find File f = parsers .iterator() .next() .getFile(); String name = Files.getNameWithoutExtension(f.getName()); String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + f.getName()); } return toks[4]; } private static void closeParsers(Collection<AbstractCsvParser> parsers) { for (AbstractCsvParser parser : parsers) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } private static File validateAndFindCommonDirectory(String sharedStoragePath, String[] files) throws Exception { String organisationDir = null; for (String file: files) { File f = new File(sharedStoragePath, file); if (!f.exists()) { LOG.error("Failed to find file {} in shared storage {}", file, sharedStoragePath); throw new FileNotFoundException("" + f + " doesn't exist"); } //LOG.info("Successfully found file {} in shared storage {}", file, sharedStoragePath); try { File orgDir = f.getParentFile(); if (organisationDir == null) { organisationDir = orgDir.getAbsolutePath(); } else { if (!organisationDir.equalsIgnoreCase(orgDir.getAbsolutePath())) { throw new Exception(); } } } catch (Exception ex) { throw new FileNotFoundException("" + f + " isn't in the 
expected directory structure within " + organisationDir); } } return new File(organisationDir); }*/ /*private static void testLogging() { while (true) { System.out.println("Checking logging at " + System.currentTimeMillis()); try { Thread.sleep(4000); } catch (Exception e) { e.printStackTrace(); } LOG.trace("trace logging"); LOG.debug("debug logging"); LOG.info("info logging"); LOG.warn("warn logging"); LOG.error("error logging"); } } */ /*private static void fixExchangeProtocols() { LOG.info("Fixing exchange protocols"); AuditRepository auditRepository = new AuditRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); LOG.info("Processing exchange " + exchangeId); Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); List<String> newIds = new ArrayList<>(); String protocolJson = headers.get(HeaderKeys.Protocols); if (!headers.containsKey(HeaderKeys.Protocols)) { try { List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr); // Get protocols where service is publisher newIds = libraryItemList.stream() .filter( libraryItem -> libraryItem.getProtocol().getServiceContract().stream() .anyMatch(sc -> sc.getType().equals(ServiceContractType.PUBLISHER) && sc.getService().getUuid().equals(serviceIdStr))) .map(t -> t.getUuid().toString()) .collect(Collectors.toList()); } catch (Exception e) { LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e); continue; } } else { try { JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson); for (int i = 0; i < node.size(); i++) { JsonNode libraryItemNode = node.get(i); JsonNode idNode = libraryItemNode.get("uuid"); String id = idNode.asText(); newIds.add(id); } } catch (Exception e) { LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e); continue; } } try { if (newIds.isEmpty()) { headers.remove(HeaderKeys.Protocols); } else { String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray()); headers.put(HeaderKeys.Protocols, protocolsJson); } } catch (JsonProcessingException e) { LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e); continue; } try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); } catch (JsonProcessingException e) { LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e); continue; } auditRepository.save(exchange); } LOG.info("Finished fixing exchange protocols"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new 
ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } if (headers.containsKey(HeaderKeys.SenderLocalIdentifier) && headers.containsKey(HeaderKeys.SenderOrganisationUuid)) { continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); Map<UUID, String> orgMap = service.getOrganisations(); if (orgMap.size() != 1) { LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId()); continue; } UUID orgId = orgMap .keySet() .stream() .collect(StreamExtension.firstOrNullCollector()); Organisation organisation = organisationRepository.getById(orgId); String odsCode = organisation.getNationalId(); headers.put(HeaderKeys.SenderLocalIdentifier, odsCode); headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString()); try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Creating exchange " + exchange.getExchangeId()); } LOG.info("Finished fixing exchange headers"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); LibraryRepository libraryRepository = new LibraryRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } boolean changed = false; UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); try { List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint : endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, 
activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid())) { if (!headers.containsKey(HeaderKeys.SourceSystem)) { headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat()); changed = true; } if (!headers.containsKey(HeaderKeys.SystemVersion)) { headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion()); changed = true; } if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) { headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString()); changed = true; } } } } } catch (Exception e) { LOG.error("Failed to find endpoint details for " + exchange.getExchangeId()); continue; } if (changed) { try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Fixed exchange " + exchange.getExchangeId()); } } LOG.info("Finished fixing exchange headers"); }*/ /*private static void testConnection(String configName) { try { JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise"); String driverClass = config.get("driverClass").asText(); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName(driverClass); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void testConnection() { try { JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise"); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName("org.postgresql.Driver"); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception { LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom); LOG.info("Testing database connection"); testConnection(configName); Service service = new ServiceRepository().getById(serviceId); List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet()); UUID orgId = orgIds.get(0); List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE); for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); //for (ExchangeByService exchangeByService: exchangeByServiceList) { UUID exchangeId = exchangeByService.getExchangeId(); if (exchangeIdStartFrom != null) { if (!exchangeIdStartFrom.equals(exchangeId)) { continue; } else { 
//once we have a match, set to null so we don't skip any subsequent ones exchangeIdStartFrom = null; } } Exchange exchange = AuditWriter.readExchange(exchangeId); String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid); UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr); //this one had 90,000 batches and doesn't need doing again *//*if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) { LOG.info("Skipping exchange " + exchangeId); continue; }*//* List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId); LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches"); for (int j=0; j<exchangeBatches.size(); j++) { ExchangeBatch exchangeBatch = exchangeBatches.get(j); UUID batchId = exchangeBatch.getBatchId(); if (batchIdStartFrom != null) { if (!batchIdStartFrom.equals(batchId)) { continue; } else { batchIdStartFrom = null; } } LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size()); try { String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null); if (!Strings.isNullOrEmpty(outbound)) { EnterpriseFiler.file(outbound, configName); } } catch (Exception ex) { throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex); } } } }*/ /*private static void fixMissingExchanges() { LOG.info("Fixing missing exchanges"); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;"); stmt.setFetchSize(100); Set<UUID> exchangeIdsDone = new HashSet<>(); AuditRepository auditRepository = new AuditRepository(); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); UUID batchId = row.get(1, UUID.class); Date date = row.getTimestamp(2); //LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date); if (exchangeIdsDone.contains(exchangeId)) { continue; } if (auditRepository.getExchange(exchangeId) != null) { continue; } UUID serviceId = findServiceId(batchId, session); if (serviceId == null) { continue; } Exchange exchange = new Exchange(); ExchangeByService exchangeByService = new ExchangeByService(); ExchangeEvent exchangeEvent = new ExchangeEvent(); Map<String, String> headers = new HashMap<>(); headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString()); String headersJson = null; try { headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setBody("Body not available, as exchange re-created"); exchange.setExchangeId(exchangeId); exchange.setHeaders(headersJson); exchange.setTimestamp(date); exchangeByService.setExchangeId(exchangeId); exchangeByService.setServiceId(serviceId); exchangeByService.setTimestamp(date); exchangeEvent.setEventDesc("Created_By_Conversion"); exchangeEvent.setExchangeId(exchangeId); exchangeEvent.setTimestamp(new Date()); auditRepository.save(exchange); auditRepository.save(exchangeEvent); auditRepository.save(exchangeByService); exchangeIdsDone.add(exchangeId); LOG.info("Creating exchange " + exchangeId); } 
LOG.info("Finished exchange fix"); } private static UUID findServiceId(UUID batchId, Session session) { Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;"); ResultSet rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId); return null; } Row row = rs.one(); String resourceType = row.getString(0); UUID resourceId = row.get(1, UUID.class); stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;"); rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId); return null; } row = rs.one(); UUID serviceId = row.get(0, UUID.class); return serviceId; }*/ /*private static void fixExchangeEvents() { List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents(); for (ExchangeEvent event: events) { if (event.getEventDesc() != null) { continue; } String eventDesc = ""; int eventType = event.getEvent().intValue(); switch (eventType) { case 1: eventDesc = "Receive"; break; case 2: eventDesc = "Validate"; break; case 3: eventDesc = "Transform_Start"; break; case 4: eventDesc = "Transform_End"; break; case 5: eventDesc = "Send"; break; default: eventDesc = "??? " + eventType; } event.setEventDesc(eventDesc); new AuditRepository().save(null, event); } }*/ /*private static void fixExchanges() { AuditRepository auditRepository = new AuditRepository(); Map<UUID, Set<UUID>> existingOnes = new HashMap(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); List<Exchange> exchanges = auditRepository.getAllExchanges(); for (Exchange exchange: exchanges) { UUID exchangeUuid = exchange.getExchangeId(); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson); continue; } *//*String serviceId = headers.get(HeaderKeys.SenderServiceUuid); if (serviceId == null) { LOG.warn("No service ID found for exchange " + exchange.getExchangeId()); continue; } UUID serviceUuid = UUID.fromString(serviceId); Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid); if (exchangeIdsDone == null) { exchangeIdsDone = new HashSet<>(); List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE); for (ExchangeByService exchangeByService: exchangeByServices) { exchangeIdsDone.add(exchangeByService.getExchangeId()); } existingOnes.put(serviceUuid, exchangeIdsDone); } //create the exchange by service entity if (!exchangeIdsDone.contains(exchangeUuid)) { Date timestamp = exchange.getTimestamp(); ExchangeByService newOne = new ExchangeByService(); newOne.setExchangeId(exchangeUuid); newOne.setServiceId(serviceUuid); newOne.setTimestamp(timestamp); auditRepository.save(newOne); }*//* try { headers.remove(HeaderKeys.BatchIdsJson); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } if (!headers.containsKey(HeaderKeys.BatchIdsJson)) { 
//fix the batch IDs not being in the exchange List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid); if (!batches.isEmpty()) { List<UUID> batchUuids = batches .stream() .map(t -> t.getBatchId()) .collect(Collectors.toList()); try { String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray()); headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange, null); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } } //} } }*/ /*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException { List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); LibraryRepository libraryRepository = new LibraryRepository(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid()) && technicalInterface.getMessageFormat().equalsIgnoreCase(software) && technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) { return endpointSystemId; } } } } catch (Exception e) { throw new PipelineException("Failed to process endpoints from service " + service.getId()); } return null; } */ /*private static void addSystemIdToExchangeHeaders() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) { LOG.info("Skipping exchange " + exchangeId + " as no service UUID"); continue; } if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " as already got system UUID"); continue; } try { //work out service ID String 
serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); UUID serviceId = UUID.fromString(serviceIdStr); String software = headers.get(HeaderKeys.SourceSystem); String version = headers.get(HeaderKeys.SystemVersion); Service service = serviceRepository.getById(serviceId); UUID systemUuid = findSystemId(service, software, version); headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString()); //work out protocol IDs try { String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr); headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson); } catch (Exception ex) { LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage()); } //save to DB headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); auditRepository.save(exchange); } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); }*/ /*private static void populateExchangeBatchPatients() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); //ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid)) || Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " because no service or system in header"); continue; } try { UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid)); UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid)); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch : exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } UUID batchId = exchangeBatch.getBatchId(); List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString()); if (resourceWrappers.isEmpty()) { continue; } List<UUID> patientIds = new ArrayList<>(); for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) { UUID patientId = resourceWrapper.getResourceId(); if (resourceWrapper.getIsDeleted()) { deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId); } if (!patientIds.contains(patientId)) { patientIds.add(patientId); } } if (patientIds.size() != 1) { LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs"); continue; } UUID patientId = patientIds.get(0); 
exchangeBatch.setEdsPatientId(patientId); exchangeBatchRepository.save(exchangeBatch); } } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); } private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception { FhirStorageService storageService = new FhirStorageService(serviceId, systemId); ResourceRepository resourceRepository = new ResourceRepository(); List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId); for (ResourceByPatient resourceWrapper: resourceWrappers) { String json = resourceWrapper.getResourceData(); Resource resource = new JsonParser().parse(json); storageService.exchangeBatchDelete(exchangeId, batchId, resource); } }*/ /*private static void convertPatientSearch() { LOG.info("Converting Patient Search"); ResourceRepository resourceRepository = new ResourceRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); LOG.info("Doing service " + service.getName()); for (UUID systemId : findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData()); String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient()); ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId)); if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) { continue; } Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData()); PatientSearchHelper.update(serviceId, systemId, patient); PatientSearchHelper.update(serviceId, systemId, episodeOfCare); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Search"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static List<UUID> findSystemIds(Service service) throws Exception { List<UUID> ret = new ArrayList<>(); List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); ret.add(endpointSystemId); } } catch (Exception e) { throw new Exception("Failed to process endpoints from service " + service.getId()); } return ret; } /*private static void convertPatientLink() { LOG.info("Converting Patient Link"); ResourceRepository resourceRepository = new ResourceRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); LOG.info("Doing service " + service.getName()); for (UUID systemId : findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if 
(Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData()); PatientLinkHelper.updatePersonId(patient); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Link"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, ResourceHistory> resourcesFixed = new HashMap<>(); Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>(); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); 
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); ProblemPreTransformer.transform(version, parsers, filer, helper); ObservationPreTransformer.transform(version, parsers, filer, helper); DrugRecordPreTransformer.transform(version, parsers, filer, helper); IssueRecordPreTransformer.transform(version, parsers, filer, helper); DiaryPreTransformer.transform(version, parsers, filer, helper); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() && !patientParser.getDeleted()) { PatientTransformer.createResource(patientParser, filer, helper, version); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { ConsultationTransformer.createResource(consultationParser, filer, helper, version); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { ObservationTransformer.createResource(observationParser, filer, helper, version); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { DiaryTransformer.createResource(diaryParser, filer, helper, version); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = 
(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version); } } issueRecordParser.close(); filer.waitToFinish(); //just to close the thread pool, even though it's not been used List<Resource> resources = filer.getNewResources(); for (Resource resource: resources) { String patientId = IdHelper.getPatientId(resource); UUID edsPatientId = UUID.fromString(patientId); ResourceType resourceType = resource.getResourceType(); UUID resourceId = UUID.fromString(resource.getId()); boolean foundResourceInDbBatch = false; List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds != null) { for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; } foundResourceInDbBatch = true; for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (!Strings.isNullOrEmpty(json)) { LOG.warn("JSON already in resource " + resourceType + " " + resourceId); } else { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid); } resourceHistory.setIsDeleted(false); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceHistory.setSchemaVersion("0.1"); resourceRepository.save(resourceByExchangeBatch); resourceRepository.save(resourceHistory); batchIdsToPutInProtocolQueue.add(batchId); String key = resourceType.toString() + ":" + resourceId; resourcesFixed.put(key, resourceHistory); } //if a patient became confidential, we will have deleted all resources for that //patient, so we need to undo that too //to undelete WHOLE patient record //1. if THIS resource is a patient //2. get all other deletes from the same exchange batch //3. delete those from resource_by_exchange_batch (the deleted ones only) //4. delete same ones from resource_history //5. retrieve most recent resource_history //6. 
if not deleted, add to resources fixed if (resourceType == ResourceType.Patient) { List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId); LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId); for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) { if (!resourceInSameBatch.getIsDeleted()) { continue; } //patient and episode resources will be restored by the above stuff, so don't try //to do it again if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString()) || resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion()); mapperResourceByExchangeBatch.delete(resourceInSameBatch); mapperResourceHistory.delete(deletedResourceHistory); batchIdsToPutInProtocolQueue.add(batchId); //check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId()); if (mostRecentDeletedResourceHistory != null && !mostRecentDeletedResourceHistory.getIsDeleted()) { String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId(); resourcesFixed.put(key2, mostRecentDeletedResourceHistory); } } } } } } //if we didn't find records in the DB to update, then if (!foundResourceInDbBatch) { //we can't generate a back-dated time UUID, but we need one so the resource_history //table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange, //and the batch ID is actually a time UUID that was allocated around the right time ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId); //if there was no batch for the exchange, then the exchange wasn't processed at all. 
So skip this exchange //and we'll pick up the same patient data in a following exchange if (firstBatch == null) { continue; } UUID versionUuid = firstBatch.getBatchId(); //find suitable batch ID UUID batchId = null; if (batchIds != null && batchIds.size() > 0) { batchId = batchIds.get(batchIds.size()-1); } else { //create new batch ID if not found ExchangeBatch exchangeBatch = new ExchangeBatch(); exchangeBatch.setBatchId(UUIDs.timeBased()); exchangeBatch.setExchangeId(exchangeId); exchangeBatch.setInsertedAt(new Date()); exchangeBatch.setEdsPatientId(edsPatientId); exchangeBatchRepository.save(exchangeBatch); batchId = exchangeBatch.getBatchId(); //add to map for next resource if (batchIds == null) { batchIds = new ArrayList<>(); } batchIds.add(batchId); batchesPerPatient.put(edsPatientId, batchIds); } String json = parserPool.composeString(resource); ResourceHistory resourceHistory = new ResourceHistory(); resourceHistory.setResourceId(resourceId); resourceHistory.setResourceType(resourceType.toString()); resourceHistory.setVersion(versionUuid); resourceHistory.setCreatedAt(new Date()); resourceHistory.setServiceId(serviceId); resourceHistory.setSystemId(systemId); resourceHistory.setIsDeleted(false); resourceHistory.setSchemaVersion("0.1"); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch(); resourceByExchangeBatch.setBatchId(batchId); resourceByExchangeBatch.setExchangeId(exchangeId); resourceByExchangeBatch.setResourceType(resourceType.toString()); resourceByExchangeBatch.setResourceId(resourceId); resourceByExchangeBatch.setVersion(versionUuid); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); resourceByExchangeBatch.setResourceData(json); resourceRepository.save(resourceHistory); resourceRepository.save(resourceByExchangeBatch); batchIdsToPutInProtocolQueue.add(batchId); } } if (!batchIdsToPutInProtocolQueue.isEmpty()) { exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue); } } //update the resource_by_service table (and the resource_by_patient view) for (ResourceHistory resourceHistory: resourcesFixed.values()) { UUID latestVersionUpdatedUuid = resourceHistory.getVersion(); ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId()); UUID latestVersionUuid = latestVersion.getVersion(); //if there have been subsequent updates to the resource, then skip it if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) { continue; } Resource resource = parserPool.parse(resourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment)metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(resourceHistory.getServiceId()); resourceByService.setSystemId(resourceHistory.getSystemId()); resourceByService.setResourceType(resourceHistory.getResourceType()); resourceByService.setResourceId(resourceHistory.getResourceId()); resourceByService.setCurrentVersion(resourceHistory.getVersion()); resourceByService.setUpdatedAt(resourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); 
resourceByService.setResourceData(resourceHistory.getResourceData()); resourceRepository.save(resourceByService); //call out to our patient search and person matching services if (resource instanceof Patient) { PatientLinkHelper.updatePersonId((Patient)resource); PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource); } else if (resource instanceof EpisodeOfCare) { PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource); } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Fixing Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) { LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); 
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers); //find any deleted patients List<UUID> deletedPatientUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getDeleted()) { //find the EDS patient ID for this local guid String patientGuid = patientParser.getPatientGuid(); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } deletedPatientUuids.add(edsPatientId); } } patientParser.close(); //go through the appts file to find properly deleted appt GUIDS List<UUID> deletedApptUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class); while (apptParser.nextRecord()) { if (apptParser.getDeleted()) { String patientGuid = apptParser.getPatientGuid(); String slotGuid = apptParser.getSlotGuid(); if (!Strings.isNullOrEmpty(patientGuid)) { String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid); UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId); deletedApptUuids.add(edsApptId); } } } apptParser.close(); for (UUID edsPatientId : deletedPatientUuids) { List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; } for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString()); for (ResourceByExchangeBatch apptWrapper : apptWrappers) { //ignore non-deleted appts if (!apptWrapper.getIsDeleted()) { continue; } //if the appt was deleted legitamately, then skip it UUID apptId = apptWrapper.getResourceId(); if (deletedApptUuids.contains(apptId)) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion()); if (saveChanges) { mapperResourceByExchangeBatch.delete(apptWrapper); mapperResourceHistory.delete(deletedResourceHistory); } LOG.info("Un-deleted 
" + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId); //now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId()); if (mostRecentResourceHistory != null && !mostRecentResourceHistory.getIsDeleted()) { Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment) metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(mostRecentResourceHistory.getServiceId()); resourceByService.setSystemId(mostRecentResourceHistory.getSystemId()); resourceByService.setResourceType(mostRecentResourceHistory.getResourceType()); resourceByService.setResourceId(mostRecentResourceHistory.getResourceId()); resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion()); resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); resourceByService.setResourceData(mostRecentResourceHistory.getResourceData()); if (saveChanges) { resourceRepository.save(resourceByService); } LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table"); } } } } } } LOG.info("Finished Deleted Appointments Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static void fixSlotReferencesForPublisher(String publisher) { try { ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> services = dal.getAll(); for (Service service: services) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { fixSlotReferences(service.getId()); } } } catch (Exception ex) { LOG.error("", ex); } } private static void fixSlotReferences(UUID serviceId) { LOG.info("Fixing Slot References in Appointments for " + serviceId); try { //get patient IDs from patient search List<UUID> patientIds = new ArrayList<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); String sql = "SELECT eds_id FROM publisher_transform_02.resource_id_map WHERE service_id = '" + serviceId + "'AND resource_type = '" + ResourceType.Patient + "';"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); connection.close(); /* EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId.toString() + "'"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); 
connection.close();*/ LOG.debug("Found " + patientIds.size() + " patients"); int done = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true); //for each patient for (UUID patientUuid: patientIds) { //LOG.debug("Checking patient " + patientUuid); //get all appointment resources List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString()); for (ResourceWrapper apptWrapper: appointmentWrappers) { //LOG.debug("Checking appointment " + apptWrapper.getResourceId()); List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId()); //the above returns most recent first, but we want to do them in order historyWrappers = Lists.reverse(historyWrappers); for (ResourceWrapper historyWrapper : historyWrappers) { if (historyWrapper.isDeleted()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " is deleted"); continue; } String json = historyWrapper.getResourceData(); Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json); if (!appt.hasSlot()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has no slot"); continue; } if (appt.getSlot().size() != 1) { throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs"); } Reference slotRef = appt.getSlot().get(0); //test if slot reference exists Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef); String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef); if (slotSourceId.indexOf(":") > -1) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has a valid slot"); continue; } //if not, correct slot reference Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId()); Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference); String sourceId = ReferenceHelper.getReferenceId(apptLocalReference); Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId); Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper); String slotEdsReferenceValue = slotEdsReference.getReference(); String oldSlotRefValue = slotRef.getReference(); slotRef.setReference(slotEdsReferenceValue); //LOG.debug("Appointment " + historyWrapper.getResourceId() + " slot ref changed from " + oldSlotRefValue + " to " + slotEdsReferenceValue); //save appointment json = FhirSerializationHelper.serializeResource(appt); historyWrapper.setResourceData(json); saveResourceWrapper(serviceId, historyWrapper); fixed++; } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); } } LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); LOG.info("Finished Fixing Slot References in Appointments for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } /*private static void fixReviews(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new 
ParserPool(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, Long> problemCodes = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (problemParser.nextRecord()) { String patientGuid = problemParser.getPatientGuid(); String observationGuid = problemParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (!problemCodes.containsKey(key)) { problemCodes.put(key, null); } } problemParser.close(); while (observationParser.nextRecord()) { String patientGuid = observationParser.getPatientGuid(); String observationGuid = observationParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (problemCodes.containsKey(key)) { Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } problemCodes.put(key, codeId); } } observationParser.close(); LOG.info("Found " + problemCodes.size() + " problem codes so far"); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); while (observationParser.nextRecord()) { String problemGuid = observationParser.getProblemGuid(); if (!Strings.isNullOrEmpty(problemGuid)) { String patientGuid = observationParser.getPatientGuid(); Long codeId = observationParser.getCodeId(); 
if (codeId == null) { continue; } String key = patientGuid + ":" + problemGuid; Long problemCodeId = problemCodes.get(key); if (problemCodeId == null || problemCodeId.longValue() != codeId.longValue()) { continue; } //if here, our code is the same as the problem, so it's a review String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid(); ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper); for (UUID systemId: systemIds) { UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); if (edsObservationId == null) { //try observations as diagnostic reports, because it could be one of those instead if (resourceType == ResourceType.Observation) { resourceType = ResourceType.DiagnosticReport; edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); } if (edsObservationId == null) { throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId); } } List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; //throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId); } for (UUID batchId: batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; //throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId); } for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (Strings.isNullOrEmpty(json)) { throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId); } Resource resource = parserPool.parse(json); if (addReviewExtension((DomainResource)resource)) { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId); resourceRepository.save(resourceByExchangeBatch); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid); } resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceRepository.save(resourceHistory); ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId); if (resourceByService != null) { UUID serviceVersionUuid = 
resourceByService.getCurrentVersion(); if (serviceVersionUuid.equals(versionUuid)) { resourceByService.setResourceData(json); resourceRepository.save(resourceByService); } } } else { LOG.info("" + resourceType + " " + edsObservationId + " already has extension"); } } } } //1. find out resource type originall saved from //2. retrieve from resource_by_exchange_batch //3. update resource in resource_by_exchange_batch //4. retrieve from resource_history //5. update resource_history //6. retrieve record from resource_by_service //7. if resource_by_service version UUID matches the resource_history updated, then update that too } } observationParser.close(); } } LOG.info("Finished Fixing Reviews"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static boolean addReviewExtension(DomainResource resource) { if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) { return false; } Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true)); resource.addExtension(extension); return true; }*/ /*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } //once we match the servce, set this to null to do all other services justThisService = null; LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<String> interestingPatientGuids = new ArrayList<>(); Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient); File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); 
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() || patientParser.getDeleted()) { interestingPatientGuids.add(patientParser.getPatientGuid()); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { interestingPatientGuids.add(consultationParser.getPatientGuid()); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { interestingPatientGuids.add(observationParser.getPatientGuid()); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { interestingPatientGuids.add(diaryParser.getPatientGuid()); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { interestingPatientGuids.add(drugRecordParser.getPatientGuid()); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { interestingPatientGuids.add(issueRecordParser.getPatientGuid()); } } issueRecordParser.close(); } Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); for (String 
interestingPatientGuid: interestingPatientGuids) { if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid); } for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) { Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId); List<UUID> batches = batchesPerPatient.get(edsPatientId); if (batches != null) { Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId); if (batchesForExchange == null) { batchesForExchange = new HashSet<>(); exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange); } batchesForExchange.addAll(batches); } } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Running Protocols for Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixOrgs() { LOG.info("Posting orgs to protocol queue"); String[] orgIds = new String[]{ "332f31a2-7b28-47cb-af6f-18f65440d43d", "c893d66b-eb89-4657-9f53-94c5867e7ed9"}; ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>(); for (String orgId: orgIds) { LOG.info("Doing org ID " + orgId); UUID orgUuid = UUID.fromString(orgId); try { //select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING; ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid); UUID batchId = resourceByExchangeBatch.getBatchId(); //select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING; ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId); UUID exchangeId = 
exchangeBatch.getExchangeId(); Set<UUID> list = exchangeBatches.get(exchangeId); if (list == null) { list = new HashSet<>(); exchangeBatches.put(exchangeId, list); } list.add(batchId); } catch (Exception ex) { LOG.error("", ex); break; } } try { //find the config for our protocol queue (which is in the inbound config) String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatches.keySet()) { Set<UUID> batchIds = exchangeBatches.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } catch (Exception ex) { LOG.error("", ex); return; } LOG.info("Finished posting orgs to protocol queue"); }*/ /*private static void findCodes() { LOG.info("Finding missing codes"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID serviceId = row.get(0, UUID.class); UUID systemId = row.get(1, UUID.class); UUID exchangeId = row.get(2, UUID.class); UUID version = row.get(3, UUID.class); ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version); String xml = audit.getErrorXml(); if (xml == null) { continue; } String codePrefix = "Failed to find clinical code CodeableConcept for codeId "; int codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " clinical code " + code + " from " + audit.getStarted()); continue; } codePrefix = "Failed to find medication CodeableConcept for codeId "; codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " drug code " + code + " from " + audit.getStarted()); continue; } } LOG.info("Finished finding missing codes"); }*/ private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating TPP 
Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createTppSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating TPP Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } //LOG.info("Doing dir " + sourceFile); createTppSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("IDPatient")) { filterColumn = "IDPatient"; } else if (name.equalsIgnoreCase("SRPatient.csv")) { filterColumn = "RowIdentifier"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); /*} else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); copyFile(sourceFile, destFile); }*/ } } } private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Vision Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createVisionSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating Vision 
Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createVisionSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; if (name.contains("encounter_data") || name.contains("journal_data") || name.contains("patient_data") || name.contains("referral_data")) { filterColumn = 0; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Homerton Subset"); try { Set<String> PersonIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } PersonIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createHomertonSubsetForFile(sourceDir, destDir, PersonIds); LOG.info("Finished Creating Homerton Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createHomertonSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withHeader(); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //PersonId column at 1 if (name.contains("ENCOUNTER") || name.contains("PATIENT")) { filterColumn = 1; } else if 
(name.contains("DIAGNOSIS")) { //PersonId column at 13 filterColumn = 13; } else if (name.contains("ALLERGY")) { //PersonId column at 2 filterColumn = 2; } else if (name.contains("PROBLEM")) { //PersonId column at 4 filterColumn = 4; } else { //if no patient column, just copy the file (i.e. PROCEDURE) parser.close(); LOG.info("Copying file without PatientId " + sourceFile); copyFile(sourceFile, destFile); continue; } Map<String, Integer> headerMap = parser.getHeaderMap(); String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Adastra Subset"); try { Set<String> caseIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line: lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } //adastra extract files are all keyed on caseId caseIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createAdastraSubsetForFile(sourceDir, destDir, caseIds); LOG.info("Finished Creating Adastra Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile: files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createAdastraSubsetForFile(sourceFile, destFile, caseIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|'); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //CaseRef column at 0 if (name.contains("NOTES") || name.contains("CASEQUESTIONS") || name.contains("OUTCOMES") || name.contains("CONSULTATION") || name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") || name.contains("PATIENT")) { filterColumn = 0; } else if (name.contains("CASE")) { //CaseRef column at 2 filterColumn = 2; } else if (name.contains("PROVIDER")) { //CaseRef column at 7 filterColumn = 7; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new 
PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String caseId = csvRecord.get(filterColumn); if (caseIds.contains(caseId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void exportFhirToCsv(UUID serviceId, String destinationPath) { try { File dir = new File(destinationPath); if (!dir.exists()) { dir.mkdirs(); } Map<String, CSVPrinter> hmPrinters = new HashMap<>(); EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current"); LOG.debug("Running query"); ResultSet rs = ps.executeQuery(); LOG.debug("Got result set"); while (rs.next()) { String id = rs.getString(1); String type = rs.getString(2); String json = rs.getString(3); CSVPrinter printer = hmPrinters.get(type); if (printer == null) { String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv"); FileWriter fileWriter = new FileWriter(new File(path)); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader("resource_id", "resource_json") .withDelimiter('\t') .withEscape((Character) null) .withQuote((Character) null) .withQuoteMode(QuoteMode.MINIMAL); printer = new CSVPrinter(bufferedWriter, format); hmPrinters.put(type, printer); } printer.printRecord(id, json); } for (String type : hmPrinters.keySet()) { CSVPrinter printer = hmPrinters.get(type); printer.flush(); printer.close(); } ps.close(); entityManager.close(); } catch (Throwable t) { LOG.error("", t); } } } /*class ResourceFiler extends FhirResourceFiler { public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError, List<UUID> batchIdsCreated, int maxFilingThreads) { super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads); } private List<Resource> newResources = new ArrayList<>(); public List<Resource> getNewResources() { return newResources; } @Override public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling saveAdminResource"); } @Override public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deleteAdminResource"); } @Override public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { for (Resource resource: resources) { if (mapIds) { IdHelper.mapIds(getServiceId(), getSystemId(), resource); } newResources.add(resource); } } @Override public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deletePatientResource"); } }*/
Updating routine to handle disabled Emis extracts
src/eds-queuereader/src/main/java/org/endeavourhealth/queuereader/Main.java
Updating routine to handle disabled Emis extracts
Java
apache-2.0
c642cc727f33f5b99f7cad368793603574074b0c
0
ferstl/pedantic-pom-enforcers
/* * Copyright (c) 2012 - 2015 by Stefan Ferstl <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.ferstl.maven.pomenforcers; import java.util.ArrayList; import java.util.Collection; import java.util.List; import com.github.ferstl.maven.pomenforcers.util.CommaSeparatorUtils; import com.google.common.base.Strings; import com.google.common.collect.Sets; import static com.github.ferstl.maven.pomenforcers.PedanticEnforcerRule.stringToEnforcerRule; /** * The compound enforcer aggregates any combination of the available pedantic * enforcer rules. Besides that it is easier to configure than the single rules, * it is also more efficient because it has to parse the POM file of each Maven * module only once. * * <pre> * ### Example * &lt;rules&gt; * &lt;compound implementation=&quot;com.github.ferstl.maven.pomenforcers.CompoundPedanticEnforcer&quot;&gt; * &lt;enforcers&gt;POM_SECTION_ORDER,MODULE_ORDER,DEPENDENCY_MANAGEMENT_ORDER,DEPENDENCY_ORDER,DEPENDENCY_CONFIGURATION,DEPENDENCY_SCOPE,DEPENDENCY_MANAGEMENT_LOCATION,PLUGIN_MANAGEMENT_ORDER,PLUGIN_CONFIGURATION,PLUGIN_MANAGEMENT_LOCATION&lt;/enforcers&gt; * * &lt;!-- POM_SECTION configuration --&gt; * &lt;pomSectionPriorities&gt;groupId,artifactId,version,packaging&lt;/pomSectionPriorities&gt; * * &lt;!-- MODULE_ORDER configuration --&gt; * &lt;moduleOrderIgnores&gt;&gt;dist-deb,dist-rpm&lt;/moduleOrderIgnores&gt; * * &lt;!-- DEPENDENCY_ORDER configuration --&gt; * &lt;dependenciesOrderBy&gt;scope,groupId,artifactId&lt;/dependenciesOrderBy&gt; * &lt;dependenciesScopePriorities&gt;compile,runtime,provided&lt;/dependenciesScopePriorities&gt; * &lt;dependenciesGroupIdPriorities&gt;com.myproject,com.mylibs&lt;/dependenciesGroupIdPriorities&gt; * &lt;dependenciesArtifactIdPriorities&gt;commons-,utils-&lt;/dependenciesArtifactIdPriorities&gt; * * &lt;!-- DEPENDENCY_MANAGEMENT_ORDER configuration --&gt; * &lt;dependencyManagementOrderBy&gt;scope,groupId,artifactId&lt;/dependencyManagementOrderBy&gt; * &lt;dependencyManagementScopePriorities&gt;compile,runtime,provided&lt;/dependencyManagementScopePriorities&gt; * &lt;dependencyManagementGroupIdPriorities&gt;com.myproject,com.mylibs&lt;/dependencyManagementGroupIdPriorities&gt; * &lt;dependencyManagementArtifactIdPriorities&gt;commons-,utils-&lt;/dependencyManagementArtifactIdPriorities&gt; * * &lt;!-- DEPENDENCY_CONFIGURATION configuration --&gt; * &lt;manageDependencyVersions&gt;true&lt;/manageDependencyVersions&gt; * &lt;allowUnmangedProjectVersions&gt;true&lt;/allowUnmangedProjectVersions&gt; * &lt;manageDependencyExclusions&gt;true&lt;/manageDependencyExclusions&gt; * * &lt;!-- DEPENDENCY_SCOPE configuration --&gt; * &lt;compileDependencies&gt;com.example:mylib1,com.example:mylib2&lt;/compileDependencies&gt; * &lt;providedDependencies&gt;javax.servlet:servlet-api&lt;/providedDependencies&gt; * &lt;runtimeDependencies&gt;com.example:myruntimelib&lt;/runtimeDependencies&gt; * &lt;systemDependencies&gt;com.sun:tools&lt;/systemDependencies&gt; * 
&lt;testDependencies&gt;org.junit:junit,org.hamcrest:hamcrest-library&lt;/testDependencies&gt; * &lt;importDependencies&gt;org.jboss:jboss-as-client&lt;/importDependencies&gt; * * &lt;!-- DEPENDENCY_MANAGEMENT_LOCATION configuration --&gt; * &lt;allowParentPomsForDependencyManagement&gt;true&lt;/allowParentPomsForDependencyManagement&gt; * &lt;dependencyManagingPoms&gt;com.example.myproject:parent,com.example.myproject:subparent&lt;/dependencyManagingPoms&gt; * * &lt;!-- PLUGIN_MANAGEMENT_ORDER configuration --&gt; * &lt;pluginManagementOrderBy&gt;groupId,artifactId&lt;/pluginManagementOrderBy&gt; * &lt;pluginManagementGroupIdPriorities&gt;com.myproject.plugins,com.myproject.testplugins&lt;/pluginManagementGroupIdPriorities&gt; * &lt;pluginManagementArtifactIdPriorities&gt;mytest-,myintegrationtest-&lt;/pluginManagementArtifactIdPriorities&gt; * * &lt;!-- PLUGIN_CONFIGURATION configuration --&gt; * &lt;managePluginVersions&gt;true&lt;/managePluginVersions&gt; * &lt;managePluginConfigurations&gt;true&lt;/managePluginConfigurations&gt; * &lt;managePluginDependencies&gt;true&lt;/managePluginDependencies&gt; * * &lt;!-- PLUGIN_MANAGEMENT_LOCATION configuration --&gt; * &lt;allowParentPomsForPluginManagement&gt;true&lt;/allowParentPomsForPluginManagement&gt; * &lt;pluginManagingPoms&gt;com.myproject:parent-pom&lt;/pluginManagingPoms&gt; * &lt;!-- DEPENDENCY_ELEMENT --&gt; * &lt;dependencyElementOrdering&gt;groupId,artifactid,version&lt;/dependencyElementOrdering&gt; * &lt;checkDependencyElements&gt;true&lt;/checkDependencyElements&gt; * &lt;checkDependencyManagementElements&gt;true&lt;/checkDependencyManagementElements&gt; * &lt;/compound&gt; * &lt;/rules&gt; * </pre> * @id n/a * @since 1.0.0 */ public class CompoundPedanticEnforcer extends AbstractPedanticEnforcer { /** * See {@link PedanticPomSectionOrderEnforcer#setSectionPriorities(String)}. * @configParam * @since 1.0.0 */ private String pomSectionPriorities; /** * See {@link PedanticModuleOrderEnforcer#setIgnoredModules(String)}. * @configParam * @since 1.0.0 */ private String moduleOrderIgnores; /** * See {@link PedanticDependencyOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String dependenciesOrderBy; /** * See {@link PedanticDependencyOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesGroupIdPriorities; /** * See {@link PedanticDependencyOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesArtifactIdPriorities; /** * See {@link PedanticDependencyOrderEnforcer#setScopePriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesScopePriorities; /** * See {@link PedanticDependencyManagementOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementOrderBy; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementGroupIdPriorities; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementArtifactIdPriorities; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setScopePriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementScopePriorities; /** * See {@link PedanticDependencyManagementLocationEnforcer#setAllowParentPoms(boolean)}. 
* @configParam * @since 1.2.0 */ private Boolean allowParentPomsForDependencyManagement; /** * See {@link PedanticDependencyManagementLocationEnforcer#setDependencyManagingPoms(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagingPoms; /** * See * {@link PedanticDependencyConfigurationEnforcer#setManageVersions(boolean)}. * @configParam * @since 1.0.0 */ private Boolean manageDependencyVersions; /** * See * {@link PedanticDependencyConfigurationEnforcer#setAllowUnmanagedProjectVersions(boolean)}. * @configParam * @since 1.0.0 */ private Boolean allowUnmangedProjectVersions; /** * See * {@link PedanticDependencyConfigurationEnforcer#setManageExclusions(boolean)}. * @configParam * @since 1.0.0 */ private Boolean manageDependencyExclusions; /** * See {@link PedanticDependencyScopeEnforcer#setCompileDependencies(String)}. * @configParam * @since 1.0.0 */ private String compileDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setProvidedDependencies(String)}. * @configParam * @since 1.0.0 */ private String providedDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setRuntimeDependencies(String)}. * @configParam * @since 1.0.0 */ private String runtimeDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setSystemDependencies(String)}. * @configParam * @since 1.0.0 */ private String systemDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setTestDependencies(String)}. * @configParam * @since 1.0.0 */ private String testDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setImportDependencies(String)}. * @configParam * @since 1.0.0 */ private String importDependencies; /** * See {@link PedanticPluginManagementOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementOrderBy; /** * See * {@link PedanticPluginManagementOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementGroupIdPriorities; /** * See * {@link PedanticPluginManagementOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementArtifactIdPriorities; /** * See {@link PedanticPluginManagementLocationEnforcer#setAllowParentPoms(boolean)}. * @configParam * @since 1.2.0 */ private Boolean allowParentPomsForPluginManagement; /** * See * {@link PedanticPluginManagementLocationEnforcer#setPluginManagingPoms(String)}. * @configParam * @since 1.0.0 */ private String pluginManagingPoms; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginVersions}. * @configParam * @since 1.0.0 */ private Boolean managePluginVersions; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginConfigurations} * @configParam * @since 1.0.0 */ private Boolean managePluginConfigurations; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginDependencies} * @configParam * @since 1.0.0 */ private Boolean managePluginDependencies; /** * See {@link PedanticDependencyElementEnforcer#elementOrdering}. * * @configParam * @since 1.4.0 */ private String dependencyElementOrdering; /** * See {@link PedanticDependencyElementEnforcer#checkDependencies}. * * @configParam * @since 1.4.0 */ private Boolean checkDependencyElements; /** * See {@link PedanticDependencyElementEnforcer#checkDependencyManagement}. * * @configParam * @since 1.4.0 */ private Boolean checkDependencyManagementElements; /** * Collection of enforcers to execute. 
*/ private final Collection<PedanticEnforcerRule> enforcers; private final PropertyInitializationVisitor propertyInitializer; public CompoundPedanticEnforcer() { this.enforcers = Sets.newLinkedHashSet(); this.propertyInitializer = new PropertyInitializationVisitor(); } public void setEnforcers(String enforcers) { CommaSeparatorUtils.splitAndAddToCollection(enforcers, this.enforcers, stringToEnforcerRule()); } @Override protected PedanticEnforcerRule getDescription() { return PedanticEnforcerRule.COMPOUND; } @Override protected void accept(PedanticEnforcerVisitor visitor) { visitor.visit(this); } @Override protected void doEnforce(ErrorReport report) { report.useLargeTitle(); List<ErrorReport> ruleErrors = new ArrayList<>(); for (PedanticEnforcerRule pedanticEnforcer : this.enforcers) { AbstractPedanticEnforcer rule = pedanticEnforcer.createEnforcerRule(); rule.initialize(getHelper(), getPom(), getProjectModel()); rule.accept(this.propertyInitializer); ErrorReport ruleReport = new ErrorReport(rule.getDescription()); rule.doEnforce(ruleReport); if (ruleReport.hasErrors()) { ruleErrors.add(ruleReport); } } collectErrors(report, ruleErrors); } private void collectErrors(ErrorReport compundReport, List<ErrorReport> ruleErrors) { if (!ruleErrors.isEmpty()) { compundReport .useLargeTitle() .addLine("Please fix these problems:") .emptyLine(); for (ErrorReport ruleError : ruleErrors) { compundReport.addLine(ruleError.toString()).emptyLine().emptyLine(); } } } private class PropertyInitializationVisitor implements PedanticEnforcerVisitor { @Override public void visit(PedanticPomSectionOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pomSectionPriorities)) { enforcer.setSectionPriorities(CompoundPedanticEnforcer.this.pomSectionPriorities); } } @Override public void visit(PedanticModuleOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.moduleOrderIgnores)) { enforcer.setIgnoredModules(CompoundPedanticEnforcer.this.moduleOrderIgnores); } } @Override public void visit(PedanticDependencyManagementOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementOrderBy)) { enforcer.setOrderBy(CompoundPedanticEnforcer.this.dependencyManagementOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.dependencyManagementGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.dependencyManagementArtifactIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementScopePriorities)) { enforcer.setScopePriorities(CompoundPedanticEnforcer.this.dependencyManagementScopePriorities); } } @Override public void visit(PedanticDependencyManagementLocationEnforcer enforcer) { if(CompoundPedanticEnforcer.this.allowParentPomsForDependencyManagement != null) { enforcer.setAllowParentPoms(CompoundPedanticEnforcer.this.allowParentPomsForDependencyManagement); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagingPoms)) { enforcer.setDependencyManagingPoms(CompoundPedanticEnforcer.this.dependencyManagingPoms); } } @Override public void visit(PedanticDependencyOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesOrderBy)) { 
enforcer.setOrderBy(CompoundPedanticEnforcer.this.dependenciesOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.dependenciesGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.dependenciesArtifactIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesScopePriorities)) { enforcer.setScopePriorities(CompoundPedanticEnforcer.this.dependenciesScopePriorities); } } @Override public void visit(PedanticDependencyConfigurationEnforcer dependencyConfigurationEnforcer) { if (CompoundPedanticEnforcer.this.manageDependencyVersions != null) { dependencyConfigurationEnforcer.setManageVersions(CompoundPedanticEnforcer.this.manageDependencyVersions); } if (CompoundPedanticEnforcer.this.allowUnmangedProjectVersions != null) { dependencyConfigurationEnforcer.setAllowUnmanagedProjectVersions( CompoundPedanticEnforcer.this.allowUnmangedProjectVersions); } if (CompoundPedanticEnforcer.this.manageDependencyExclusions != null) { dependencyConfigurationEnforcer.setManageExclusions(CompoundPedanticEnforcer.this.manageDependencyExclusions); } } @Override public void visit(PedanticDependencyScopeEnforcer dependencyScopeEnforcer) { if (CompoundPedanticEnforcer.this.compileDependencies != null) { dependencyScopeEnforcer.setCompileDependencies(CompoundPedanticEnforcer.this.compileDependencies); } if (CompoundPedanticEnforcer.this.providedDependencies != null) { dependencyScopeEnforcer.setProvidedDependencies(CompoundPedanticEnforcer.this.providedDependencies); } if (CompoundPedanticEnforcer.this.runtimeDependencies != null) { dependencyScopeEnforcer.setRuntimeDependencies(CompoundPedanticEnforcer.this.runtimeDependencies); } if (CompoundPedanticEnforcer.this.systemDependencies != null) { dependencyScopeEnforcer.setSystemDependencies(CompoundPedanticEnforcer.this.systemDependencies); } if (CompoundPedanticEnforcer.this.testDependencies != null) { dependencyScopeEnforcer.setTestDependencies(CompoundPedanticEnforcer.this.testDependencies); } if (CompoundPedanticEnforcer.this.importDependencies != null) { dependencyScopeEnforcer.setImportDependencies(CompoundPedanticEnforcer.this.importDependencies); } } @Override public void visit(PedanticPluginManagementOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementOrderBy)) { enforcer.setOrderBy(CompoundPedanticEnforcer.this.pluginManagementOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.pluginManagementGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.pluginManagementArtifactIdPriorities); } } @Override public void visit(PedanticPluginConfigurationEnforcer enforcer) { if (CompoundPedanticEnforcer.this.managePluginVersions != null) { enforcer.setManageVersions(CompoundPedanticEnforcer.this.managePluginVersions); } if (CompoundPedanticEnforcer.this.managePluginConfigurations != null) { enforcer.setManageConfigurations(CompoundPedanticEnforcer.this.managePluginConfigurations); } if (CompoundPedanticEnforcer.this.managePluginDependencies != null) { 
enforcer.setManageDependencies(CompoundPedanticEnforcer.this.managePluginDependencies); } } @Override public void visit(PedanticPluginManagementLocationEnforcer enforcer) { if (CompoundPedanticEnforcer.this.allowParentPomsForPluginManagement != null) { enforcer.setAllowParentPoms(CompoundPedanticEnforcer.this.allowParentPomsForPluginManagement); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagingPoms)) { enforcer.setPluginManagingPoms(CompoundPedanticEnforcer.this.pluginManagingPoms); } } @Override public void visit(PedanticDependencyElementEnforcer enforcer) { if (CompoundPedanticEnforcer.this.dependencyElementOrdering != null) { enforcer.setElementPriorities(CompoundPedanticEnforcer.this.dependencyElementOrdering); } if (CompoundPedanticEnforcer.this.checkDependencyElements != null) { enforcer.setCheckDependencies(CompoundPedanticEnforcer.this.checkDependencyElements); } if (CompoundPedanticEnforcer.this.checkDependencyManagementElements != null) { enforcer.setCheckDependencyManagement(CompoundPedanticEnforcer.this.checkDependencyManagementElements); } } @Override public void visit(CompoundPedanticEnforcer enforcer) { // nothing to do. } } }
src/main/java/com/github/ferstl/maven/pomenforcers/CompoundPedanticEnforcer.java
/* * Copyright (c) 2012 - 2015 by Stefan Ferstl <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.ferstl.maven.pomenforcers; import java.util.ArrayList; import java.util.Collection; import java.util.List; import com.github.ferstl.maven.pomenforcers.util.CommaSeparatorUtils; import com.google.common.base.Strings; import com.google.common.collect.Sets; import static com.github.ferstl.maven.pomenforcers.PedanticEnforcerRule.stringToEnforcerRule; /** * The compound enforcer aggregates any combination of the available pedantic * enforcer rules. Besides that it is easier to configure than the single rules, * it is also more efficient because it has to parse the POM file of each Maven * module only once. * * <pre> * ### Example * &lt;rules&gt; * &lt;compound implementation=&quot;com.github.ferstl.maven.pomenforcers.CompoundPedanticEnforcer&quot;&gt; * &lt;enforcers&gt;POM_SECTION_ORDER,MODULE_ORDER,DEPENDENCY_MANAGEMENT_ORDER,DEPENDENCY_ORDER,DEPENDENCY_CONFIGURATION,DEPENDENCY_SCOPE,DEPENDENCY_MANAGEMENT_LOCATION,PLUGIN_MANAGEMENT_ORDER,PLUGIN_CONFIGURATION,PLUGIN_MANAGEMENT_LOCATION&lt;/enforcers&gt; * * &lt;!-- POM_SECTION configuration --&gt; * &lt;pomSectionPriorities&gt;groupId,artifactId,version,packaging&lt;/pomSectionPriorities&gt; * * &lt;!-- MODULE_ORDER configuration --&gt; * &lt;moduleOrderIgnores&gt;&gt;dist-deb,dist-rpm&lt;/moduleOrderIgnores&gt; * * &lt;!-- DEPENDENCY_ORDER configuration --&gt; * &lt;dependenciesOrderBy&gt;scope,groupId,artifactId&lt;/dependenciesOrderBy&gt; * &lt;dependenciesScopePriorities&gt;compile,runtime,provided&lt;/dependenciesScopePriorities&gt; * &lt;dependenciesGroupIdPriorities&gt;com.myproject,com.mylibs&lt;/dependenciesGroupIdPriorities&gt; * &lt;dependenciesArtifactIdPriorities&gt;commons-,utils-&lt;/dependenciesArtifactIdPriorities&gt; * * &lt;!-- DEPENDENCY_MANAGEMENT_ORDER configuration --&gt; * &lt;dependencyManagementOrderBy&gt;scope,groupId,artifactId&lt;/dependencyManagementOrderBy&gt; * &lt;dependencyManagementScopePriorities&gt;compile,runtime,provided&lt;/dependencyManagementScopePriorities&gt; * &lt;dependencyManagementGroupIdPriorities&gt;com.myproject,com.mylibs&lt;/dependencyManagementGroupIdPriorities&gt; * &lt;dependencyManagementArtifactIdPriorities&gt;commons-,utils-&lt;/dependencyManagementArtifactIdPriorities&gt; * * &lt;!-- DEPENDENCY_CONFIGURATION configuration --&gt; * &lt;manageDependencyVersions&gt;true&lt;/manageDependencyVersions&gt; * &lt;allowUnmangedProjectVersions&gt;true&lt;/allowUnmangedProjectVersions&gt; * &lt;manageDependencyExclusions&gt;true&lt;/manageDependencyExclusions&gt; * * &lt;!-- DEPENDENCY_SCOPE configuration --&gt; * &lt;compileDependencies&gt;com.example:mylib1,com.example:mylib2&lt;/compileDependencies&gt; * &lt;providedDependencies&gt;javax.servlet:servlet-api&lt;/providedDependencies&gt; * &lt;runtimeDependencies&gt;com.example:myruntimelib&lt;/runtimeDependencies&gt; * &lt;systemDependencies&gt;com.sun:tools&lt;/systemDependencies&gt; * 
&lt;testDependencies&gt;org.junit:junit,org.hamcrest:hamcrest-library&lt;/testDependencies&gt; * &lt;importDependencies&gt;org.jboss:jboss-as-client&lt;/importDependencies&gt; * * &lt;!-- DEPENDENCY_MANAGEMENT_LOCATION configuration --&gt; * &lt;allowParentPomsForDependencyManagement&gt;true&lt;/allowParentPomsForDependencyManagement&gt; * &lt;dependencyManagingPoms&gt;com.example.myproject:parent,com.example.myproject:subparent&lt;/dependencyManagingPoms&gt; * * &lt;!-- PLUGIN_MANAGEMENT_ORDER configuration --&gt; * &lt;pluginManagementOrderBy&gt;groupId,artifactId&lt;/pluginManagementOrderBy&gt; * &lt;pluginManagementGroupIdPriorities&gt;com.myproject.plugins,com.myproject.testplugins&lt;/pluginManagementGroupIdPriorities&gt; * &lt;pluginManagementArtifactIdPriorities&gt;mytest-,myintegrationtest-&lt;/pluginManagementArtifactIdPriorities&gt; * * &lt;!-- PLUGIN_CONFIGURATION configuration --&gt; * &lt;managePluginVersions&gt;true&lt;/managePluginVersions&gt; * &lt;managePluginConfigurations&gt;true&lt;/managePluginConfigurations&gt; * &lt;managePluginDependencies&gt;true&lt;/managePluginDependencies&gt; * * &lt;!-- PLUGIN_MANAGEMENT_LOCATION configuration --&gt; * &lt;allowParentPomsForPluginManagement&gt;true&lt;/allowParentPomsForPluginManagement&gt; * &lt;pluginManagingPoms&gt;com.myproject:parent-pom&lt;/pluginManagingPoms&gt; * &lt;/compound&gt; * &lt;/rules&gt; * </pre> * @id n/a * @since 1.0.0 */ public class CompoundPedanticEnforcer extends AbstractPedanticEnforcer { /** * See {@link PedanticPomSectionOrderEnforcer#setSectionPriorities(String)}. * @configParam * @since 1.0.0 */ private String pomSectionPriorities; /** * See {@link PedanticModuleOrderEnforcer#setIgnoredModules(String)}. * @configParam * @since 1.0.0 */ private String moduleOrderIgnores; /** * See {@link PedanticDependencyOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String dependenciesOrderBy; /** * See {@link PedanticDependencyOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesGroupIdPriorities; /** * See {@link PedanticDependencyOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesArtifactIdPriorities; /** * See {@link PedanticDependencyOrderEnforcer#setScopePriorities(String)}. * @configParam * @since 1.0.0 */ private String dependenciesScopePriorities; /** * See {@link PedanticDependencyManagementOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementOrderBy; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementGroupIdPriorities; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementArtifactIdPriorities; /** * See * {@link PedanticDependencyManagementOrderEnforcer#setScopePriorities(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagementScopePriorities; /** * See {@link PedanticDependencyManagementLocationEnforcer#setAllowParentPoms(boolean)}. * @configParam * @since 1.2.0 */ private Boolean allowParentPomsForDependencyManagement; /** * See {@link PedanticDependencyManagementLocationEnforcer#setDependencyManagingPoms(String)}. * @configParam * @since 1.0.0 */ private String dependencyManagingPoms; /** * See * {@link PedanticDependencyConfigurationEnforcer#setManageVersions(boolean)}. 
* @configParam * @since 1.0.0 */ private Boolean manageDependencyVersions; /** * See * {@link PedanticDependencyConfigurationEnforcer#setAllowUnmanagedProjectVersions(boolean)}. * @configParam * @since 1.0.0 */ private Boolean allowUnmangedProjectVersions; /** * See * {@link PedanticDependencyConfigurationEnforcer#setManageExclusions(boolean)}. * @configParam * @since 1.0.0 */ private Boolean manageDependencyExclusions; /** * See {@link PedanticDependencyScopeEnforcer#setCompileDependencies(String)}. * @configParam * @since 1.0.0 */ private String compileDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setProvidedDependencies(String)}. * @configParam * @since 1.0.0 */ private String providedDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setRuntimeDependencies(String)}. * @configParam * @since 1.0.0 */ private String runtimeDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setSystemDependencies(String)}. * @configParam * @since 1.0.0 */ private String systemDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setTestDependencies(String)}. * @configParam * @since 1.0.0 */ private String testDependencies; /** * See {@link PedanticDependencyScopeEnforcer#setImportDependencies(String)}. * @configParam * @since 1.0.0 */ private String importDependencies; /** * See {@link PedanticPluginManagementOrderEnforcer#setOrderBy(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementOrderBy; /** * See * {@link PedanticPluginManagementOrderEnforcer#setGroupIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementGroupIdPriorities; /** * See * {@link PedanticPluginManagementOrderEnforcer#setArtifactIdPriorities(String)}. * @configParam * @since 1.0.0 */ private String pluginManagementArtifactIdPriorities; /** * See {@link PedanticPluginManagementLocationEnforcer#setAllowParentPoms(boolean)}. * @configParam * @since 1.2.0 */ private Boolean allowParentPomsForPluginManagement; /** * See * {@link PedanticPluginManagementLocationEnforcer#setPluginManagingPoms(String)}. * @configParam * @since 1.0.0 */ private String pluginManagingPoms; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginVersions}. * @configParam * @since 1.0.0 */ private Boolean managePluginVersions; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginConfigurations} * @configParam * @since 1.0.0 */ private Boolean managePluginConfigurations; /** * See {@link PedanticPluginConfigurationEnforcer#managePluginDependencies} * @configParam * @since 1.0.0 */ private Boolean managePluginDependencies; /** * See {@link PedanticDependencyElementEnforcer#elementOrdering}. * * @configParam * @since 1.4.0 */ private String dependencyElementOrdering; /** * See {@link PedanticDependencyElementEnforcer#checkDependencies}. * * @configParam * @since 1.4.0 */ private Boolean checkDependencyElements; /** * See {@link PedanticDependencyElementEnforcer#checkDependencyManagement}. * * @configParam * @since 1.4.0 */ private Boolean checkDependencyManagementElements; /** * Collection of enforcers to execute. 
*/ private final Collection<PedanticEnforcerRule> enforcers; private final PropertyInitializationVisitor propertyInitializer; public CompoundPedanticEnforcer() { this.enforcers = Sets.newLinkedHashSet(); this.propertyInitializer = new PropertyInitializationVisitor(); } public void setEnforcers(String enforcers) { CommaSeparatorUtils.splitAndAddToCollection(enforcers, this.enforcers, stringToEnforcerRule()); } @Override protected PedanticEnforcerRule getDescription() { return PedanticEnforcerRule.COMPOUND; } @Override protected void accept(PedanticEnforcerVisitor visitor) { visitor.visit(this); } @Override protected void doEnforce(ErrorReport report) { report.useLargeTitle(); List<ErrorReport> ruleErrors = new ArrayList<>(); for (PedanticEnforcerRule pedanticEnforcer : this.enforcers) { AbstractPedanticEnforcer rule = pedanticEnforcer.createEnforcerRule(); rule.initialize(getHelper(), getPom(), getProjectModel()); rule.accept(this.propertyInitializer); ErrorReport ruleReport = new ErrorReport(rule.getDescription()); rule.doEnforce(ruleReport); if (ruleReport.hasErrors()) { ruleErrors.add(ruleReport); } } collectErrors(report, ruleErrors); } private void collectErrors(ErrorReport compundReport, List<ErrorReport> ruleErrors) { if (!ruleErrors.isEmpty()) { compundReport .useLargeTitle() .addLine("Please fix these problems:") .emptyLine(); for (ErrorReport ruleError : ruleErrors) { compundReport.addLine(ruleError.toString()).emptyLine().emptyLine(); } } } private class PropertyInitializationVisitor implements PedanticEnforcerVisitor { @Override public void visit(PedanticPomSectionOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pomSectionPriorities)) { enforcer.setSectionPriorities(CompoundPedanticEnforcer.this.pomSectionPriorities); } } @Override public void visit(PedanticModuleOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.moduleOrderIgnores)) { enforcer.setIgnoredModules(CompoundPedanticEnforcer.this.moduleOrderIgnores); } } @Override public void visit(PedanticDependencyManagementOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementOrderBy)) { enforcer.setOrderBy(CompoundPedanticEnforcer.this.dependencyManagementOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.dependencyManagementGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.dependencyManagementArtifactIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagementScopePriorities)) { enforcer.setScopePriorities(CompoundPedanticEnforcer.this.dependencyManagementScopePriorities); } } @Override public void visit(PedanticDependencyManagementLocationEnforcer enforcer) { if(CompoundPedanticEnforcer.this.allowParentPomsForDependencyManagement != null) { enforcer.setAllowParentPoms(CompoundPedanticEnforcer.this.allowParentPomsForDependencyManagement); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependencyManagingPoms)) { enforcer.setDependencyManagingPoms(CompoundPedanticEnforcer.this.dependencyManagingPoms); } } @Override public void visit(PedanticDependencyOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesOrderBy)) { 
enforcer.setOrderBy(CompoundPedanticEnforcer.this.dependenciesOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.dependenciesGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.dependenciesArtifactIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.dependenciesScopePriorities)) { enforcer.setScopePriorities(CompoundPedanticEnforcer.this.dependenciesScopePriorities); } } @Override public void visit(PedanticDependencyConfigurationEnforcer dependencyConfigurationEnforcer) { if (CompoundPedanticEnforcer.this.manageDependencyVersions != null) { dependencyConfigurationEnforcer.setManageVersions(CompoundPedanticEnforcer.this.manageDependencyVersions); } if (CompoundPedanticEnforcer.this.allowUnmangedProjectVersions != null) { dependencyConfigurationEnforcer.setAllowUnmanagedProjectVersions( CompoundPedanticEnforcer.this.allowUnmangedProjectVersions); } if (CompoundPedanticEnforcer.this.manageDependencyExclusions != null) { dependencyConfigurationEnforcer.setManageExclusions(CompoundPedanticEnforcer.this.manageDependencyExclusions); } } @Override public void visit(PedanticDependencyScopeEnforcer dependencyScopeEnforcer) { if (CompoundPedanticEnforcer.this.compileDependencies != null) { dependencyScopeEnforcer.setCompileDependencies(CompoundPedanticEnforcer.this.compileDependencies); } if (CompoundPedanticEnforcer.this.providedDependencies != null) { dependencyScopeEnforcer.setProvidedDependencies(CompoundPedanticEnforcer.this.providedDependencies); } if (CompoundPedanticEnforcer.this.runtimeDependencies != null) { dependencyScopeEnforcer.setRuntimeDependencies(CompoundPedanticEnforcer.this.runtimeDependencies); } if (CompoundPedanticEnforcer.this.systemDependencies != null) { dependencyScopeEnforcer.setSystemDependencies(CompoundPedanticEnforcer.this.systemDependencies); } if (CompoundPedanticEnforcer.this.testDependencies != null) { dependencyScopeEnforcer.setTestDependencies(CompoundPedanticEnforcer.this.testDependencies); } if (CompoundPedanticEnforcer.this.importDependencies != null) { dependencyScopeEnforcer.setImportDependencies(CompoundPedanticEnforcer.this.importDependencies); } } @Override public void visit(PedanticPluginManagementOrderEnforcer enforcer) { if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementOrderBy)) { enforcer.setOrderBy(CompoundPedanticEnforcer.this.pluginManagementOrderBy); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementGroupIdPriorities)) { enforcer.setGroupIdPriorities(CompoundPedanticEnforcer.this.pluginManagementGroupIdPriorities); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagementArtifactIdPriorities)) { enforcer.setArtifactIdPriorities(CompoundPedanticEnforcer.this.pluginManagementArtifactIdPriorities); } } @Override public void visit(PedanticPluginConfigurationEnforcer enforcer) { if (CompoundPedanticEnforcer.this.managePluginVersions != null) { enforcer.setManageVersions(CompoundPedanticEnforcer.this.managePluginVersions); } if (CompoundPedanticEnforcer.this.managePluginConfigurations != null) { enforcer.setManageConfigurations(CompoundPedanticEnforcer.this.managePluginConfigurations); } if (CompoundPedanticEnforcer.this.managePluginDependencies != null) { 
enforcer.setManageDependencies(CompoundPedanticEnforcer.this.managePluginDependencies); } } @Override public void visit(PedanticPluginManagementLocationEnforcer enforcer) { if (CompoundPedanticEnforcer.this.allowParentPomsForPluginManagement != null) { enforcer.setAllowParentPoms(CompoundPedanticEnforcer.this.allowParentPomsForPluginManagement); } if (!Strings.isNullOrEmpty(CompoundPedanticEnforcer.this.pluginManagingPoms)) { enforcer.setPluginManagingPoms(CompoundPedanticEnforcer.this.pluginManagingPoms); } } @Override public void visit(PedanticDependencyElementEnforcer enforcer) { if (CompoundPedanticEnforcer.this.dependencyElementOrdering != null) { enforcer.setElementPriorities(CompoundPedanticEnforcer.this.dependencyElementOrdering); } if (CompoundPedanticEnforcer.this.checkDependencyElements != null) { enforcer.setCheckDependencies(CompoundPedanticEnforcer.this.checkDependencyElements); } if (CompoundPedanticEnforcer.this.checkDependencyManagementElements != null) { enforcer.setCheckDependencyManagement(CompoundPedanticEnforcer.this.checkDependencyManagementElements); } } @Override public void visit(CompoundPedanticEnforcer enforcer) { // nothing to do. } } }
#19 Integrate new config options into com
src/main/java/com/github/ferstl/maven/pomenforcers/CompoundPedanticEnforcer.java
#19 Integrate new config options into com
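A note on the pattern used in the CompoundPedanticEnforcer record above: every configuration field is a nullable boxed type or a String, and PropertyInitializationVisitor only pushes a value into a sub-enforcer when that field was actually configured, so the sub-enforcer's own defaults survive otherwise. The following is a minimal, self-contained Java sketch of that null-guarded propagation; all names (PropagationSketch, SubRule, CompoundRule, checkEnabled) are illustrative and not taken from the project.

// Null-guarded propagation: a compound rule overrides a sub-rule's default only when configured.
public class PropagationSketch {

    static class SubRule {
        private boolean checkEnabled = true; // sub-rule default, kept unless explicitly overridden
        void setCheckEnabled(boolean value) { this.checkEnabled = value; }
        boolean isCheckEnabled() { return checkEnabled; }
    }

    static class CompoundRule {
        private Boolean checkEnabled; // null means "not configured by the user"

        void configure(SubRule rule) {
            if (checkEnabled != null) { // propagate only values that were explicitly set
                rule.setCheckEnabled(checkEnabled);
            }
        }
    }

    public static void main(String[] args) {
        SubRule rule = new SubRule();
        new CompoundRule().configure(rule); // nothing configured, so the default survives
        System.out.println(rule.isCheckEnabled()); // prints: true
    }
}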
Java
apache-2.0
beb5b73264049f9943f53a6337516f19d3347d72
0
lesaint/damapping
/** * Copyright (C) 2013 Sébastien Lesaint (http://www.javatronic.fr/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.javatronic.damapping.util; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * Preconditions - Partial clone of Guava's Preconditions class * * @author Sébastien Lesaint */ public final class Preconditions { private static final String NPE_DEFAULT_MSG = "object can not be null"; private static final String IAE_DEFAULT_MSG = "Argument is not valid"; private Preconditions() { // prevents instantiation } /** * Throws a NullPointerException with a generic message if the specified object is {@code null}, otherwise * returns it. * * @param obj an object of any type or {@code null} * @param <T> any type * * @return the argument */ @Nonnull public static <T> T checkNotNull(@Nullable T obj) { return checkNotNull(obj, NPE_DEFAULT_MSG); } /** * Throws a NullPointerException with the specified message if the specified object is {@code null}, otherwise * returns it. * <p> * A default message will be used if the specified message is {@code null} or empty. * </p> * * @param obj an object of any type or {@code null} * @param message a {@link String} or {@code null} * @param <T> any type * * @return the argument */ @Nonnull public static <T> T checkNotNull(@Nullable T obj, @Nullable String message) { if (obj == null) { throw new NullPointerException(message == null || message.isEmpty() ? NPE_DEFAULT_MSG : message); } return obj; } /** * Throws a {@link IllegalArgumentException} with a generic message if the specified boolean value is false. * * @param test a boolean value */ public static void checkArgument(boolean test) { if (!test) { throw new IllegalArgumentException(IAE_DEFAULT_MSG); } } /** * Throws a {@link IllegalArgumentException} with the specified message if the specified boolean * value is false. * <p> * A default message will be used if the specified message is {@code null} or empty. * </p> * * @param test a boolean value * @param message a {@link String} or {@code null} */ public static void checkArgument(boolean test, @Nullable String message) { if (!test) { throw new IllegalArgumentException(message == null || message.isEmpty() ? IAE_DEFAULT_MSG : message); } } }
core-parent/util/src/main/java/fr/javatronic/damapping/util/Preconditions.java
/** * Copyright (C) 2013 Sébastien Lesaint (http://www.javatronic.fr/) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.javatronic.damapping.util; /** * Preconditions - Partial clone of Guava's Preconditions class * * @author Sébastien Lesaint */ public final class Preconditions { private Preconditions() { // prevents instantiation } public static <T> T checkNotNull(T obj) { return checkNotNull(obj, "object can not be null"); } public static <T> void checkArgument(boolean test) { if (!test) { throw new IllegalArgumentException("Argument is not valid"); } } public static <T> T checkNotNull(T obj, String message) { if (obj == null) { throw new NullPointerException(message); } return obj; } }
[#52] add Preconditions.checkArgument(boolean, String) add method checkArgument which supports specifying the IllegalArgumentException message; add javadoc on methods
core-parent/util/src/main/java/fr/javatronic/damapping/util/Preconditions.java
[#52] add Preconditions.checkArgument(boolean, String)
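A short usage sketch for the Preconditions utility shown above. The two calls match the signatures in the file (checkNotNull(T, String) and checkArgument(boolean, String)); the demo class, method, and values are made up for illustration.

import fr.javatronic.damapping.util.Preconditions;

public class PreconditionsUsageSketch {

    // Rejects null input and non-positive lengths before doing any work.
    static String abbreviate(String input, int maxLength) {
        Preconditions.checkNotNull(input, "input can not be null");
        Preconditions.checkArgument(maxLength > 0, "maxLength must be strictly positive");
        return input.length() <= maxLength ? input : input.substring(0, maxLength);
    }

    public static void main(String[] args) {
        System.out.println(abbreviate("damapping", 3)); // prints: dam
        abbreviate("damapping", 0); // throws IllegalArgumentException("maxLength must be strictly positive")
    }
}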
Java
apache-2.0
95dda14ed7ed3749b375285bebb1f3d6539e2b3d
0
opencb/cellbase,opencb/cellbase,opencb/cellbase,opencb/cellbase,opencb/cellbase,opencb/cellbase,opencb/cellbase
/* * Copyright 2015-2020 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.lib.indexer; import org.apache.commons.lang3.StringUtils; import org.opencb.cellbase.core.common.Species; import org.opencb.cellbase.core.config.CellBaseConfiguration; import org.opencb.cellbase.core.exception.CellbaseException; import org.opencb.cellbase.lib.SpeciesUtils; import org.opencb.cellbase.lib.impl.core.MongoDBAdaptorFactory; import org.opencb.commons.datastore.mongodb.MongoDBIndexUtils; import org.opencb.commons.datastore.mongodb.MongoDataStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; public class IndexManager { private CellBaseConfiguration configuration; private Logger logger; public IndexManager(CellBaseConfiguration configuration) { this.configuration = configuration; logger = LoggerFactory.getLogger(this.getClass()); } /** * Create indexes. Exception thrown if species or assembly is incorrect. NULL assembly value will default to * first assembly in the config file. * * @param data list of collections to index * @param speciesName name of species * @param assemblyName name of assembly * @param dropIndexesFirst if TRUE, deletes the index before creating a new one. FALSE, no index is created if it * already exists. * @throws IOException if configuration file can't be read * @throws CellbaseException if indexes file isn't found, or invalid input */ public void createMongoDBIndexes(String data, String speciesName, String assemblyName, boolean dropIndexesFirst) throws CellbaseException, IOException { Species species = SpeciesUtils.getSpecies(configuration, speciesName, assemblyName); if (StringUtils.isEmpty(data) || "all".equalsIgnoreCase(data)) { createMongoDBIndexes(new String[0], species.getSpecies(), species.getAssembly(), dropIndexesFirst); } else { String[] collections = data.split(","); createMongoDBIndexes(collections, species.getSpecies(), species.getAssembly(), dropIndexesFirst); } } /** * Create indexes for specified collection. Use by the load to create indexes. Will throw an exception if * given database does not already exist. * * @param collectionName create indexes for this collection, can be "all" or a list of collection names * @param databaseName name of database * @param dropIndexesFirst if TRUE, deletes the index before creating a new one. FALSE, no index is created if it * already exists. 
* @throws IOException if configuration file can't be read * @throws CellbaseException if indexes file isn't found */ public void createMongoDBIndexes(String collectionName, String databaseName, boolean dropIndexesFirst) throws IOException, CellbaseException { InputStream resourceAsStream = IndexManager.class.getResourceAsStream("/mongodb-indexes.json"); if (resourceAsStream == null) { throw new CellbaseException("Index file mongodb-indexes.json not found"); } MongoDBAdaptorFactory factory = new MongoDBAdaptorFactory(configuration); MongoDataStore mongoDataStore = factory.getMongoDBDatastore(databaseName); if (StringUtils.isEmpty(collectionName) || "all".equalsIgnoreCase(collectionName)) { MongoDBIndexUtils.createAllIndexes(mongoDataStore, resourceAsStream, dropIndexesFirst); } else { String[] collections = collectionName.split(","); for (String collection : collections) { MongoDBIndexUtils.createIndexes(mongoDataStore, resourceAsStream, collection, dropIndexesFirst); } } } private void createMongoDBIndexes(String[] collections, String species, String assembly, boolean dropIndexesFirst) throws IOException, CellbaseException { InputStream resourceAsStream = IndexManager.class.getResourceAsStream("/mongodb-indexes.json"); if (resourceAsStream == null) { throw new CellbaseException("Index file mongodb-indexes.json not found"); } MongoDBAdaptorFactory factory = new MongoDBAdaptorFactory(configuration); MongoDataStore mongoDataStore = factory.getMongoDBDatastore(species, assembly); if (collections == null || collections.length == 0) { MongoDBIndexUtils.createAllIndexes(mongoDataStore, resourceAsStream, dropIndexesFirst); } else { for (String collectionName : collections) { try { MongoDBIndexUtils.createIndexes(mongoDataStore, resourceAsStream, collectionName, dropIndexesFirst); } catch (NullPointerException e) { throw new CellbaseException("Error creating an index for collection '" + collectionName + "', collection does not exist"); } } } } }
cellbase-lib/src/main/java/org/opencb/cellbase/lib/indexer/IndexManager.java
/* * Copyright 2015-2020 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.lib.indexer; import org.apache.commons.lang3.StringUtils; import org.opencb.cellbase.core.common.Species; import org.opencb.cellbase.core.config.CellBaseConfiguration; import org.opencb.cellbase.core.exception.CellbaseException; import org.opencb.cellbase.lib.SpeciesUtils; import org.opencb.cellbase.lib.impl.core.MongoDBAdaptorFactory; import org.opencb.commons.datastore.mongodb.MongoDBIndexUtils; import org.opencb.commons.datastore.mongodb.MongoDataStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; public class IndexManager { private CellBaseConfiguration configuration; private Logger logger; public IndexManager(CellBaseConfiguration configuration) { this.configuration = configuration; logger = LoggerFactory.getLogger(this.getClass()); } /** * Create indexes. Exception thrown if species or assembly is incorrect. NULL assembly value will default to * first assembly in the config file. * * @param data list of collections to index * @param speciesName name of species * @param assemblyName name of assembly * @param dropIndexesFirst if TRUE, deletes the index before creating a new one. FALSE, no index is created if it * already exists. * @throws IOException if configuration file can't be read * @throws CellbaseException if indexes file isn't found, or invalid input */ public void createMongoDBIndexes(String data, String speciesName, String assemblyName, boolean dropIndexesFirst) throws CellbaseException, IOException { Species species = SpeciesUtils.getSpecies(configuration, speciesName, assemblyName); if (StringUtils.isEmpty(data) || "all".equalsIgnoreCase(data)) { createMongoDBIndexes(new String[0], species.getSpecies(), species.getAssembly(), dropIndexesFirst); } else { String[] collections = data.split(","); createMongoDBIndexes(collections, species.getSpecies(), species.getAssembly(), dropIndexesFirst); } } /** * Create indexes for specified collection. Use by the load to create indexes. Will throw an exception if * given database does not already exist. * * @param collectionName create indexes for this collection * @param databaseName name of database * @param dropIndexesFirst if TRUE, deletes the index before creating a new one. FALSE, no index is created if it * already exists. 
* @throws IOException if configuration file can't be read * @throws CellbaseException if indexes file isn't found */ public void createMongoDBIndexes(String collectionName, String databaseName, boolean dropIndexesFirst) throws IOException, CellbaseException { InputStream resourceAsStream = IndexManager.class.getResourceAsStream("/mongodb-indexes.json"); if (resourceAsStream == null) { throw new CellbaseException("Index file mongodb-indexes.json not found"); } MongoDBAdaptorFactory factory = new MongoDBAdaptorFactory(configuration); MongoDataStore mongoDataStore = factory.getMongoDBDatastore(databaseName); MongoDBIndexUtils.createIndexes(mongoDataStore, resourceAsStream, collectionName, dropIndexesFirst); } private void createMongoDBIndexes(String[] collections, String species, String assembly, boolean dropIndexesFirst) throws IOException, CellbaseException { InputStream resourceAsStream = IndexManager.class.getResourceAsStream("/mongodb-indexes.json"); if (resourceAsStream == null) { throw new CellbaseException("Index file mongodb-indexes.json not found"); } MongoDBAdaptorFactory factory = new MongoDBAdaptorFactory(configuration); MongoDataStore mongoDataStore = factory.getMongoDBDatastore(species, assembly); if (collections == null || collections.length == 0) { MongoDBIndexUtils.createAllIndexes(mongoDataStore, resourceAsStream, dropIndexesFirst); } else { for (String collectionName : collections) { try { MongoDBIndexUtils.createIndexes(mongoDataStore, resourceAsStream, collectionName, dropIndexesFirst); } catch (NullPointerException e) { throw new CellbaseException("Error creating an index for collection '" + collectionName + "', collection does not exist"); } } } } }
allow ALL as an option when indexing
cellbase-lib/src/main/java/org/opencb/cellbase/lib/indexer/IndexManager.java
allow ALL as an option when indexing
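The IndexManager change above accepts an empty value or "all" to index every collection and otherwise splits a comma-separated list of collection names. A standalone sketch of that dispatch logic; the class name and collection names here are illustrative only and do not come from CellBase.

import java.util.Arrays;
import java.util.List;

public class CollectionSelectionSketch {

    // Empty or "all" selects everything; otherwise only the listed collections are selected.
    static List<String> resolve(String data, List<String> allCollections) {
        if (data == null || data.isEmpty() || "all".equalsIgnoreCase(data)) {
            return allCollections;
        }
        return Arrays.asList(data.split(","));
    }

    public static void main(String[] args) {
        List<String> all = Arrays.asList("gene", "variation", "protein");
        System.out.println(resolve("all", all));           // prints: [gene, variation, protein]
        System.out.println(resolve("gene,protein", all));  // prints: [gene, protein]
    }
}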
Java
apache-2.0
34329cb4200df437036ce16553f94a1963d95d45
0
trekawek/jackrabbit-oak,apache/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,amit-jain/jackrabbit-oak,amit-jain/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.jcr.query; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.jackrabbit.core.query.FulltextQueryTest; import org.apache.jackrabbit.core.query.FulltextSQL2QueryTest; import org.apache.jackrabbit.core.query.JoinTest; import org.apache.jackrabbit.core.query.LimitAndOffsetTest; import org.apache.jackrabbit.core.query.MixinTest; import org.apache.jackrabbit.core.query.OrderByTest; import org.apache.jackrabbit.core.query.ParentNodeTest; import org.apache.jackrabbit.core.query.PathQueryNodeTest; import org.apache.jackrabbit.core.query.SQL2OffsetLimitTest; import org.apache.jackrabbit.core.query.SQL2OuterJoinTest; import org.apache.jackrabbit.core.query.SQLTest; import org.apache.jackrabbit.core.query.SkipDeletedNodesTest; import org.apache.jackrabbit.test.ConcurrentTestSuite; public class QueryJcrTest extends TestCase { public static Test suite() { TestSuite suite = new ConcurrentTestSuite( "Jackrabbit query tests using a Lucene based index"); suite.addTestSuite(FulltextQueryTest.class); suite.addTestSuite(SQLTest.class); suite.addTestSuite(JoinTest.class); suite.addTestSuite(SkipDeletedNodesTest.class); suite.addTestSuite(PathQueryNodeTest.class); suite.addTestSuite(FulltextSQL2QueryTest.class); // FIXME See OAK-957: SQL2NodeLocalNameTest test failures // suite.addTestSuite(SQL2NodeLocalNameTest.class); suite.addTestSuite(MixinTest.class); suite.addTestSuite(SQL2OuterJoinTest.class); suite.addTestSuite(SQL2OffsetLimitTest.class); suite.addTestSuite(LimitAndOffsetTest.class); suite.addTestSuite(OrderByTest.class); suite.addTestSuite(ParentNodeTest.class); // FAILURES // // suite.addTestSuite(SQL2OrderByTest.class); // order by score is not stable // suite.addTestSuite(QueryResultTest.class); // OAK-484 // suite.addTestSuite(ExcerptTest.class); // OAK-318 // suite.addTestSuite(SimilarQueryTest.class); // OAK-319 // suite.addTestSuite(DerefTest.class); // OAK-321 // suite.addTestSuite(XPathAxisTest.class); // OAK-322 // suite.addTestSuite(SQL2QueryResultTest.class); // OAK-323 // suite.addTestSuite(SimpleQueryTest.class); // OAK-327 // suite.addTestSuite(FnNameQueryTest.class); // OAK-328 // suite.addTestSuite(UpperLowerCaseQueryTest.class); // OAK-329 // suite.addTestSuite(SQL2PathEscapingTest.class); // OAK-481 // NOT IMPLEMENTED // // suite.addTestSuite(ChildAxisQueryTest.class); // sns // suite.addTestSuite(SelectClauseTest.class); // sns // suite.addTestSuite(ShareableNodeTest.class); // ws#clone // suite.addTestSuite(VersionStoreQueryTest.class); // versioning // TOO JR SPECIFIC // // suite.addTestSuite(LimitedAccessQueryTest.class); // acls // suite.addTestSuite(SkipDeniedNodesTest.class); // acls return suite; } }
oak-lucene/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.jcr.query; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.jackrabbit.core.query.FulltextQueryTest; import org.apache.jackrabbit.core.query.FulltextSQL2QueryTest; import org.apache.jackrabbit.core.query.JoinTest; import org.apache.jackrabbit.core.query.LimitAndOffsetTest; import org.apache.jackrabbit.core.query.MixinTest; import org.apache.jackrabbit.core.query.OrderByTest; import org.apache.jackrabbit.core.query.ParentNodeTest; import org.apache.jackrabbit.core.query.PathQueryNodeTest; import org.apache.jackrabbit.core.query.SQL2NodeLocalNameTest; import org.apache.jackrabbit.core.query.SQL2OffsetLimitTest; import org.apache.jackrabbit.core.query.SQL2OuterJoinTest; import org.apache.jackrabbit.core.query.SQLTest; import org.apache.jackrabbit.core.query.SkipDeletedNodesTest; import org.apache.jackrabbit.test.ConcurrentTestSuite; public class QueryJcrTest extends TestCase { public static Test suite() { TestSuite suite = new ConcurrentTestSuite( "Jackrabbit query tests using a Lucene based index"); suite.addTestSuite(FulltextQueryTest.class); suite.addTestSuite(SQLTest.class); suite.addTestSuite(JoinTest.class); suite.addTestSuite(SkipDeletedNodesTest.class); suite.addTestSuite(PathQueryNodeTest.class); suite.addTestSuite(FulltextSQL2QueryTest.class); suite.addTestSuite(SQL2NodeLocalNameTest.class); suite.addTestSuite(MixinTest.class); suite.addTestSuite(SQL2OuterJoinTest.class); suite.addTestSuite(SQL2OffsetLimitTest.class); suite.addTestSuite(LimitAndOffsetTest.class); suite.addTestSuite(OrderByTest.class); suite.addTestSuite(ParentNodeTest.class); // FAILURES // // suite.addTestSuite(SQL2OrderByTest.class); // order by score is not stable // suite.addTestSuite(QueryResultTest.class); // OAK-484 // suite.addTestSuite(ExcerptTest.class); // OAK-318 // suite.addTestSuite(SimilarQueryTest.class); // OAK-319 // suite.addTestSuite(DerefTest.class); // OAK-321 // suite.addTestSuite(XPathAxisTest.class); // OAK-322 // suite.addTestSuite(SQL2QueryResultTest.class); // OAK-323 // suite.addTestSuite(SimpleQueryTest.class); // OAK-327 // suite.addTestSuite(FnNameQueryTest.class); // OAK-328 // suite.addTestSuite(UpperLowerCaseQueryTest.class); // OAK-329 // suite.addTestSuite(SQL2PathEscapingTest.class); // OAK-481 // NOT IMPLEMENTED // // suite.addTestSuite(ChildAxisQueryTest.class); // sns // suite.addTestSuite(SelectClauseTest.class); // sns // suite.addTestSuite(ShareableNodeTest.class); // ws#clone // suite.addTestSuite(VersionStoreQueryTest.class); // versioning // TOO JR SPECIFIC // // suite.addTestSuite(LimitedAccessQueryTest.class); // acls // suite.addTestSuite(SkipDeniedNodesTest.class); // acls return suite; } }
OAK-957: SQL2NodeLocalNameTest test failures temporarily disable failing tests git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1513369 13f79535-47bb-0310-9956-ffa450edef68
oak-lucene/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java
OAK-957: SQL2NodeLocalNameTest test failures temporarily disable failing tests
Java
apache-2.0
0fb943a5addde1d0e41f38e6b0e8a6f31ce14fe4
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.progress.util; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.StandardProgressIndicator; import com.intellij.openapi.progress.WrappedProgressIndicator; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class ProgressWrapper extends AbstractProgressIndicatorBase implements WrappedProgressIndicator, StandardProgressIndicator { private final ProgressIndicator myOriginal; private final boolean myCheckCanceledForMe; private final int nested; protected ProgressWrapper(@NotNull ProgressIndicator original) { this(original, false); } protected ProgressWrapper(@NotNull ProgressIndicator original, boolean checkCanceledForMe) { if (!(original instanceof StandardProgressIndicator)) { throw new IllegalArgumentException("Original indicator " + original + " must be StandardProgressIndicator but got: " + original.getClass()); } myOriginal = original; myCheckCanceledForMe = checkCanceledForMe; nested = 1 + (original instanceof ProgressWrapper ? 
((ProgressWrapper)original).nested : -1); //if (nested > 50) { // LOG.error("Too many wrapped indicators"); //} ProgressManager.assertNotCircular(original); dontStartActivity(); } @Override public final void cancel() { super.cancel(); } @Override public final boolean isCanceled() { ProgressWrapper current = this; while (true) { if (current.myCheckCanceledForMe && current.isCanceledRaw()) return true; ProgressIndicator original = current.getOriginalProgressIndicator(); if (original instanceof ProgressWrapper) { current = (ProgressWrapper)original; } else { return original.isCanceled(); } } } @Nullable @Override protected Throwable getCancellationTrace() { if (myOriginal instanceof AbstractProgressIndicatorBase) { return ((AbstractProgressIndicatorBase)myOriginal).getCancellationTrace(); } return super.getCancellationTrace(); } private boolean isCanceledRaw() { return super.isCanceled(); } private void checkCanceledRaw() { super.checkCanceled(); } @Override public final void checkCanceled() { ProgressWrapper current = this; while (true) { if (current.isCanceledRaw()) { current.checkCanceledRaw(); } ProgressIndicator original = current.getOriginalProgressIndicator(); if (original instanceof ProgressWrapper) { current = (ProgressWrapper)original; } else { original.checkCanceled(); break; } } } @Override public void setText(String text) { super.setText(text); myOriginal.setText(text); } @Override public void setText2(String text) { super.setText2(text); myOriginal.setText2(text); } @Override public void setFraction(double fraction) { super.setFraction(fraction); myOriginal.setFraction(fraction); } @Override public void setIndeterminate(boolean indeterminate) { myOriginal.setIndeterminate(indeterminate); } @Override public boolean isIndeterminate() { return myOriginal.isIndeterminate(); } @NotNull @Override public ModalityState getModalityState() { return myOriginal.getModalityState(); } @Override @NotNull public ProgressIndicator getOriginalProgressIndicator() { return myOriginal; } @Contract(value = "null -> null; !null -> !null", pure = true) public static ProgressWrapper wrap(@Nullable ProgressIndicator indicator) { return indicator == null || indicator instanceof ProgressWrapper ? (ProgressWrapper)indicator : new ProgressWrapper(indicator); } @Contract(value = "null -> null; !null -> !null", pure = true) public static ProgressIndicator unwrap(ProgressIndicator indicator) { return indicator instanceof ProgressWrapper ? ((ProgressWrapper)indicator).getOriginalProgressIndicator() : indicator; } @NotNull public static ProgressIndicator unwrapAll(@NotNull ProgressIndicator indicator) { while (indicator instanceof ProgressWrapper) { indicator = ((ProgressWrapper)indicator).getOriginalProgressIndicator(); } return indicator; } }
platform/core-impl/src/com/intellij/openapi/progress/util/ProgressWrapper.java
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.progress.util; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.StandardProgressIndicator; import com.intellij.openapi.progress.WrappedProgressIndicator; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class ProgressWrapper extends AbstractProgressIndicatorBase implements WrappedProgressIndicator, StandardProgressIndicator { private final ProgressIndicator myOriginal; private final boolean myCheckCanceledForMe; private final int nested; protected ProgressWrapper(@NotNull ProgressIndicator original) { this(original, false); } protected ProgressWrapper(@NotNull ProgressIndicator original, boolean checkCanceledForMe) { if (!(original instanceof StandardProgressIndicator)) { throw new IllegalArgumentException("Original indicator " + original + " must be StandardProgressIndicator but got: " + original.getClass()); } myOriginal = original; myCheckCanceledForMe = checkCanceledForMe; nested = 1 + (original instanceof ProgressWrapper ? 
((ProgressWrapper)original).nested : -1); //if (nested > 50) { // LOG.error("Too many wrapped indicators"); //} ProgressManager.assertNotCircular(original); dontStartActivity(); } @Override public final void cancel() { super.cancel(); } @Override public final boolean isCanceled() { ProgressWrapper current = this; while (true) { if (current.myCheckCanceledForMe && current.isCanceledRaw()) return true; ProgressIndicator original = current.getOriginalProgressIndicator(); if (original instanceof ProgressWrapper) { current = (ProgressWrapper)original; } else { return original.isCanceled(); } } } @Nullable @Override protected Throwable getCancellationTrace() { if (myOriginal instanceof AbstractProgressIndicatorBase) { return ((AbstractProgressIndicatorBase)myOriginal).getCancellationTrace(); } return super.getCancellationTrace(); } private boolean isCanceledRaw() { return super.isCanceled(); } private void checkCanceledRaw() { super.checkCanceled(); } @Override public final void checkCanceled() { ProgressWrapper current = this; while (true) { if (current.isCanceledRaw()) { current.checkCanceledRaw(); } ProgressIndicator original = current.getOriginalProgressIndicator(); if (original instanceof ProgressWrapper) { current = (ProgressWrapper)original; } else { original.checkCanceled(); break; } } } @Override public void setText(String text) { super.setText(text); myOriginal.setText(text); } @Override public void setText2(String text) { super.setText2(text); myOriginal.setText2(text); } @Override public void setFraction(double fraction) { super.setFraction(fraction); myOriginal.setFraction(fraction); } @Override public void setIndeterminate(boolean indeterminate) { super.setIndeterminate(indeterminate); myOriginal.setIndeterminate(indeterminate); } @NotNull @Override public ModalityState getModalityState() { return myOriginal.getModalityState(); } @Override @NotNull public ProgressIndicator getOriginalProgressIndicator() { return myOriginal; } @Contract(value = "null -> null; !null -> !null", pure = true) public static ProgressWrapper wrap(@Nullable ProgressIndicator indicator) { return indicator == null || indicator instanceof ProgressWrapper ? (ProgressWrapper)indicator : new ProgressWrapper(indicator); } @Contract(value = "null -> null; !null -> !null", pure = true) public static ProgressIndicator unwrap(ProgressIndicator indicator) { return indicator instanceof ProgressWrapper ? ((ProgressWrapper)indicator).getOriginalProgressIndicator() : indicator; } @NotNull public static ProgressIndicator unwrapAll(@NotNull ProgressIndicator indicator) { while (indicator instanceof ProgressWrapper) { indicator = ((ProgressWrapper)indicator).getOriginalProgressIndicator(); } return indicator; } }
fully delegate ProgressWrapper.indeterminate to the wrappee to prevent "This progress indicator is indeterminate" warnings via code like: if (!ind.isIndeterminate()) { ind.setFraction(...); }

GitOrigin-RevId: 6447f66403630fe5f479bbe876fdddc8727c7cea
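This record does not include the previous version of the file, so the exact diff is not visible. Purely as an illustration of the delegation described in the message, and assuming isIndeterminate() is not final in the base class, a "fully delegating" wrapper could look like the sketch below; it is not the actual commit.

import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressWrapper;

// Illustration only: report and set the indeterminate flag entirely on the
// wrapped indicator, so that "if (!ind.isIndeterminate()) ind.setFraction(...)"
// checks the same state that setFraction() is later applied to.
class IndeterminateDelegatingWrapper extends ProgressWrapper {

    IndeterminateDelegatingWrapper(ProgressIndicator original) {
        super(original);
    }

    @Override
    public boolean isIndeterminate() {
        return getOriginalProgressIndicator().isIndeterminate();
    }

    @Override
    public void setIndeterminate(boolean indeterminate) {
        getOriginalProgressIndicator().setIndeterminate(indeterminate);
    }
}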
platform/core-impl/src/com/intellij/openapi/progress/util/ProgressWrapper.java
fully delegate ProgressWrapper.indeterminate to the wrappee
Java
apache-2.0
3b73baa489ab04649b1444c83f534565e5c905ea
0
Sargul/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,dbeaver/dbeaver,Sargul/dbeaver,dbeaver/dbeaver
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.*; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBPDataSource; import org.jkiss.dbeaver.model.DBPEvaluationContext; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.DBValueFormatting; import org.jkiss.dbeaver.model.data.DBDAttributeBinding; import org.jkiss.dbeaver.model.data.DBDAttributeTransformerDescriptor; import org.jkiss.dbeaver.model.preferences.DBPPropertyDescriptor; import org.jkiss.dbeaver.model.virtual.DBVEntity; import org.jkiss.dbeaver.model.virtual.DBVEntityAttribute; import org.jkiss.dbeaver.model.virtual.DBVTransformSettings; import org.jkiss.dbeaver.model.virtual.DBVUtils; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.runtime.properties.PropertySourceCustom; import org.jkiss.dbeaver.ui.DBeaverIcons; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.dialogs.BaseDialog; import org.jkiss.dbeaver.ui.properties.PropertyTreeViewer; import org.jkiss.utils.CommonUtils; import java.util.*; import java.util.List; class TransformerSettingsDialog extends BaseDialog { private static final Log log = Log.getLog(TransformerSettingsDialog.class); private final ResultSetViewer viewer; private final DBVEntity vEntitySrc; private final DBVEntity vEntity; private DBDAttributeBinding currentAttribute; private DBVTransformSettings settings; private PropertyTreeViewer propertiesEditor; private PropertySourceCustom propertySource; private boolean selector; private List<? 
extends DBDAttributeTransformerDescriptor> transformerList; private Text infoText; private DBDAttributeTransformerDescriptor transformer; private Combo transformerCombo; private Table attributeTable; TransformerSettingsDialog(ResultSetViewer viewer) { this(viewer, null, null, false); } TransformerSettingsDialog(ResultSetViewer viewer, DBDAttributeBinding currentAttribute, DBVTransformSettings settings, boolean selector) { super(viewer.getControl().getShell(), DBUtils.getObjectFullName(viewer.getDataContainer(), DBPEvaluationContext.UI) + " transforms", null); this.viewer = viewer; this.currentAttribute = currentAttribute; this.settings = settings; this.selector = selector; this.vEntitySrc = DBVUtils.getVirtualEntity(viewer.getDataContainer(), true); this.vEntity = new DBVEntity(vEntitySrc.getContainer(), vEntitySrc); } @Override protected Composite createDialogArea(Composite parent) { Composite composite = super.createDialogArea(parent); Composite panel = composite; if (selector) { SashForm divider = new SashForm(composite, SWT.HORIZONTAL); divider.setSashWidth(10); divider.setLayoutData(new GridData(GridData.FILL_BOTH)); panel = divider; createAttributeSelectorArea(panel); } else { if (currentAttribute != null) { detectTransformers(); } } createTransformSettingsArea(panel); if (currentAttribute != null) { updateTransformerInfo(); } return parent; } private void createAttributeSelectorArea(Composite composite) { Composite panel = UIUtils.createComposite(composite, 1); attributeTable = new Table(panel, SWT.FULL_SELECTION | SWT.BORDER); attributeTable.setHeaderVisible(true); GridData gd = new GridData(GridData.FILL_BOTH); gd.widthHint = 400; attributeTable.setLayoutData(gd); UIUtils.executeOnResize(attributeTable, () -> UIUtils.packColumns(attributeTable, true)); UIUtils.createTableColumn(attributeTable, SWT.LEFT, "Name"); UIUtils.createTableColumn(attributeTable, SWT.LEFT, "Transforms"); for (DBDAttributeBinding attr : viewer.getModel().getVisibleAttributes()) { TableItem attrItem = new TableItem(attributeTable, SWT.NONE);; attrItem.setData(attr); attrItem.setText(0, attr.getName()); attrItem.setImage(0, DBeaverIcons.getImage(DBValueFormatting.getObjectImage(attr, true))); updateTransformItem(attrItem); if (this.currentAttribute == attr) { attributeTable.setSelection(attrItem); } } attributeTable.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { updateAttributeSelection(); } }); } private void updateTransformItem(TableItem attrItem) { DBDAttributeBinding attr = (DBDAttributeBinding) attrItem.getData(); String transformStr = ""; DBVEntityAttribute vAttr = vEntity.getVirtualAttribute(attr, false); if (vAttr != null) { DBVTransformSettings settings = vAttr.getTransformSettings(); if (settings != null) { if (!CommonUtils.isEmpty(settings.getIncludedTransformers())) { transformStr = String.join(",", settings.getIncludedTransformers()); } else if (!CommonUtils.isEmpty(settings.getCustomTransformer())) { DBDAttributeTransformerDescriptor td = DBWorkbench.getPlatform().getValueHandlerRegistry().getTransformer(settings.getCustomTransformer()); if (td != null) { transformStr = td.getName(); } } } } attrItem.setText(1, transformStr); } private void updateAttributeSelection() { if (currentAttribute != null) { saveTransformerSettings(); for (TableItem item : attributeTable.getItems()) { if (item.getData() == currentAttribute) { updateTransformItem(item); break; } } } if (attributeTable.getSelectionIndex() < 0) { currentAttribute = null; } else { 
currentAttribute = (DBDAttributeBinding) attributeTable.getItem(attributeTable.getSelectionIndex()).getData(); detectTransformers(); updateTransformerInfo(); } } private void detectTransformers() { final DBPDataSource dataSource = viewer.getDataSource(); DBVEntityAttribute vAttr = vEntity.getVirtualAttribute(currentAttribute, false); if (settings == null) { settings = vAttr == null ? null : DBVUtils.getTransformSettings(vAttr, false); } if (dataSource != null && settings != null && !CommonUtils.isEmpty(settings.getCustomTransformer())) { transformer = dataSource.getContainer().getPlatform().getValueHandlerRegistry().getTransformer(settings.getCustomTransformer()); } else { transformer = null; } transformerList = DBWorkbench.getPlatform().getValueHandlerRegistry().findTransformers(currentAttribute.getDataSource(), currentAttribute, null); } private void updateTransformerInfo() { if (selector) { transformerCombo.removeAll(); transformerCombo.add(ResultSetViewer.EMPTY_TRANSFORMER_NAME); if (transformerList != null && selector) { for (DBDAttributeTransformerDescriptor td : transformerList) { transformerCombo.add(td.getName()); if (td == transformer) { transformerCombo.select(transformerCombo.getItemCount() - 1); } } } if (transformerCombo.getSelectionIndex() < 0) { transformerCombo.select(0); } } if (infoText != null) { if (transformer != null && transformer.getDescription() != null) { infoText.setText(transformer.getDescription()); } else { infoText.setText(""); } } if (transformer != null) { Collection<? extends DBPPropertyDescriptor> transformerProperties = transformer.getProperties(); loadTransformerSettings(transformerProperties); } else { loadTransformerSettings(Collections.emptyList()); } } private void saveTransformerSettings() { if (currentAttribute == null || (settings == null && transformer == null)) { // Nothign to save - just ignore return; } if (settings == null) { settings = DBVUtils.getTransformSettings(vEntity.getVirtualAttribute(currentAttribute, true), true); } if (selector) { settings.setCustomTransformer(transformer == null ? 
null : transformer.getId()); } if (transformer == null) { settings.setTransformOptions(new LinkedHashMap<>()); } else { final Map<Object, Object> properties = propertySource.getPropertiesWithDefaults(); for (Map.Entry<Object, Object> prop : properties.entrySet()) { if (prop.getValue() != null) { settings.setTransformOption(prop.getKey().toString(), prop.getValue().toString()); } } } } private void createTransformSettingsArea(Composite composite) { Composite settingsPanel = UIUtils.createComposite(composite, 1); if (selector || transformer != null) { final Composite placeholder = UIUtils.createControlGroup(settingsPanel, "Transformer", 2, GridData.FILL_HORIZONTAL, -1); if (!selector) { UIUtils.createLabelText(placeholder, "Name", transformer.getName(), SWT.READ_ONLY); } else { transformerCombo = UIUtils.createLabelCombo(placeholder, "Name", SWT.DROP_DOWN | SWT.READ_ONLY); transformerCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); transformerCombo.add(ResultSetViewer.EMPTY_TRANSFORMER_NAME); transformerCombo.select(0); transformerCombo.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { int selectionIndex = transformerCombo.getSelectionIndex(); if (selectionIndex == 0) { transformer = null; infoText.setText("N/A"); loadTransformerSettings(Collections.emptyList()); } else { transformer = transformerList.get(selectionIndex - 1); infoText.setText(CommonUtils.notEmpty(transformer.getDescription())); loadTransformerSettings(transformer.getProperties()); } updateAttributeSelection(); composite.layout(true, true); } }); } Label infoLabel = UIUtils.createControlLabel(settingsPanel, "Info"); infoLabel.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_BEGINNING)); infoText = new Text(settingsPanel, SWT.READ_ONLY | SWT.WRAP); GridData gd = new GridData(GridData.FILL_HORIZONTAL); gd.widthHint = 300; infoText.setLayoutData(gd); } propertiesEditor = new PropertyTreeViewer(settingsPanel, SWT.BORDER); propertiesEditor.getControl().setFocus(); } private void loadTransformerSettings(Collection<? extends DBPPropertyDescriptor> properties) { Map<String, Object> transformOptions = settings == null ? null : settings.getTransformOptions(); if (transformOptions == null) { transformOptions = Collections.emptyMap(); } propertySource = new PropertySourceCustom( properties, transformOptions); propertiesEditor.loadProperties(propertySource); } @Override protected void createButtonsForButtonBar(Composite parent) { createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } @Override protected void okPressed() { saveTransformerSettings(); vEntitySrc.copyFrom(vEntity); vEntitySrc.persistConfiguration(); super.okPressed(); } }
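A hypothetical caller for the dialog above. The class is package-private, so the sketch lives in the same package; passing null for the attribute and settings together with selector=true opens the variant with the attribute table on the left, and okPressed() persists the chosen configuration.

package org.jkiss.dbeaver.ui.controls.resultset;

import org.eclipse.jface.dialogs.IDialogConstants;

class TransformerSettingsDialogUsageSketch {

    // "viewer" is assumed to be the active ResultSetViewer; this must run on the UI thread.
    static void editTransformers(ResultSetViewer viewer) {
        TransformerSettingsDialog dialog = new TransformerSettingsDialog(viewer, null, null, true);
        if (dialog.open() == IDialogConstants.OK_ID) {
            // Virtual entity configuration was copied back and persisted by the dialog.
        }
    }
}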
plugins/org.jkiss.dbeaver.ui.editors.data/src/org/jkiss/dbeaver/ui/controls/resultset/TransformerSettingsDialog.java
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.*; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBPDataSource; import org.jkiss.dbeaver.model.DBPEvaluationContext; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.DBValueFormatting; import org.jkiss.dbeaver.model.data.DBDAttributeBinding; import org.jkiss.dbeaver.model.data.DBDAttributeTransformerDescriptor; import org.jkiss.dbeaver.model.preferences.DBPPropertyDescriptor; import org.jkiss.dbeaver.model.virtual.DBVEntity; import org.jkiss.dbeaver.model.virtual.DBVEntityAttribute; import org.jkiss.dbeaver.model.virtual.DBVTransformSettings; import org.jkiss.dbeaver.model.virtual.DBVUtils; import org.jkiss.dbeaver.runtime.DBWorkbench; import org.jkiss.dbeaver.runtime.properties.PropertySourceCustom; import org.jkiss.dbeaver.ui.DBeaverIcons; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.dialogs.BaseDialog; import org.jkiss.dbeaver.ui.properties.PropertyTreeViewer; import org.jkiss.utils.CommonUtils; import java.util.*; import java.util.List; class TransformerSettingsDialog extends BaseDialog { private static final Log log = Log.getLog(TransformerSettingsDialog.class); private final ResultSetViewer viewer; private final DBVEntity vEntitySrc; private final DBVEntity vEntity; private DBDAttributeBinding currentAttribute; private DBVTransformSettings settings; private PropertyTreeViewer propertiesEditor; private PropertySourceCustom propertySource; private boolean selector; private List<? 
extends DBDAttributeTransformerDescriptor> transformerList; private Text infoText; private DBDAttributeTransformerDescriptor transformer; private Combo transformerCombo; private Table attributeTable; TransformerSettingsDialog(ResultSetViewer viewer) { this(viewer, null, null, false); } TransformerSettingsDialog(ResultSetViewer viewer, DBDAttributeBinding currentAttribute, DBVTransformSettings settings, boolean selector) { super(viewer.getControl().getShell(), DBUtils.getObjectFullName(viewer.getDataContainer(), DBPEvaluationContext.UI) + " transforms", null); this.viewer = viewer; this.currentAttribute = currentAttribute; this.settings = settings; this.selector = selector; this.vEntitySrc = DBVUtils.getVirtualEntity(viewer.getDataContainer(), true); this.vEntity = new DBVEntity(vEntitySrc.getContainer(), vEntitySrc); } @Override protected Composite createDialogArea(Composite parent) { Composite composite = super.createDialogArea(parent); Composite panel = composite; if (selector) { SashForm divider = new SashForm(composite, SWT.HORIZONTAL); divider.setSashWidth(10); divider.setLayoutData(new GridData(GridData.FILL_BOTH)); panel = divider; createAttributeSelectorArea(panel); } else { if (currentAttribute != null) { detectTransformers(); } } createTransformSettingsArea(panel); if (currentAttribute != null) { updateTransformerInfo(); } return parent; } private void createAttributeSelectorArea(Composite composite) { Composite panel = UIUtils.createComposite(composite, 1); attributeTable = new Table(panel, SWT.FULL_SELECTION | SWT.BORDER); attributeTable.setHeaderVisible(true); GridData gd = new GridData(GridData.FILL_BOTH); gd.widthHint = 400; attributeTable.setLayoutData(gd); UIUtils.executeOnResize(attributeTable, () -> UIUtils.packColumns(attributeTable, true)); UIUtils.createTableColumn(attributeTable, SWT.LEFT, "Name"); UIUtils.createTableColumn(attributeTable, SWT.LEFT, "Transforms"); for (DBDAttributeBinding attr : viewer.getModel().getVisibleAttributes()) { TableItem attrItem = new TableItem(attributeTable, SWT.NONE);; attrItem.setData(attr); attrItem.setText(0, attr.getName()); attrItem.setImage(0, DBeaverIcons.getImage(DBValueFormatting.getObjectImage(attr, true))); updateTransformItem(attrItem); if (this.currentAttribute == attr) { attributeTable.setSelection(attrItem); } } attributeTable.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { updateAttributeSelection(); } }); } private void updateTransformItem(TableItem attrItem) { DBDAttributeBinding attr = (DBDAttributeBinding) attrItem.getData(); String transformStr = ""; DBVEntityAttribute vAttr = vEntity.getVirtualAttribute(attr, false); if (vAttr != null) { DBVTransformSettings settings = vAttr.getTransformSettings(); if (settings != null) { if (!CommonUtils.isEmpty(settings.getIncludedTransformers())) { transformStr = String.join(",", settings.getIncludedTransformers()); } else if (!CommonUtils.isEmpty(settings.getCustomTransformer())) { DBDAttributeTransformerDescriptor td = DBWorkbench.getPlatform().getValueHandlerRegistry().getTransformer(settings.getCustomTransformer()); if (td != null) { transformStr = td.getName(); } } } } attrItem.setText(1, transformStr); } private void updateAttributeSelection() { if (currentAttribute != null) { saveTransformerSettings(); for (TableItem item : attributeTable.getItems()) { if (item.getData() == currentAttribute) { updateTransformItem(item); break; } } } if (attributeTable.getSelectionIndex() < 0) { currentAttribute = null; } else { 
currentAttribute = (DBDAttributeBinding) attributeTable.getItem(attributeTable.getSelectionIndex()).getData(); detectTransformers(); updateTransformerInfo(); } } private void detectTransformers() { final DBPDataSource dataSource = viewer.getDataSource(); DBVEntityAttribute vAttr = vEntity.getVirtualAttribute(currentAttribute, false); settings = vAttr == null ? null : DBVUtils.getTransformSettings(vAttr, false); if (dataSource != null && settings != null && !CommonUtils.isEmpty(settings.getCustomTransformer())) { transformer = dataSource.getContainer().getPlatform().getValueHandlerRegistry().getTransformer(settings.getCustomTransformer()); } else { transformer = null; } transformerList = DBWorkbench.getPlatform().getValueHandlerRegistry().findTransformers(currentAttribute.getDataSource(), currentAttribute, null); } private void updateTransformerInfo() { if (selector) { transformerCombo.removeAll(); transformerCombo.add(ResultSetViewer.EMPTY_TRANSFORMER_NAME); if (transformerList != null && selector) { for (DBDAttributeTransformerDescriptor td : transformerList) { transformerCombo.add(td.getName()); if (td == transformer) { transformerCombo.select(transformerCombo.getItemCount() - 1); } } } if (transformerCombo.getSelectionIndex() < 0) { transformerCombo.select(0); } } if (infoText != null) { if (transformer != null && transformer.getDescription() != null) { infoText.setText(transformer.getDescription()); } else { infoText.setText(""); } } if (transformer != null) { Collection<? extends DBPPropertyDescriptor> transformerProperties = transformer.getProperties(); loadTransformerSettings(transformerProperties); } else { loadTransformerSettings(Collections.emptyList()); } } private void saveTransformerSettings() { if (currentAttribute == null || (settings == null && transformer == null)) { // Nothign to save - just ignore return; } if (settings == null) { settings = DBVUtils.getTransformSettings(vEntity.getVirtualAttribute(currentAttribute, true), true); } if (selector) { settings.setCustomTransformer(transformer == null ? 
null : transformer.getId()); } if (transformer == null) { settings.setTransformOptions(new LinkedHashMap<>()); } else { final Map<Object, Object> properties = propertySource.getPropertiesWithDefaults(); for (Map.Entry<Object, Object> prop : properties.entrySet()) { if (prop.getValue() != null) { settings.setTransformOption(prop.getKey().toString(), prop.getValue().toString()); } } } } private void createTransformSettingsArea(Composite composite) { Composite settingsPanel = UIUtils.createComposite(composite, 1); if (selector || transformer != null) { final Composite placeholder = UIUtils.createControlGroup(settingsPanel, "Transformer", 2, GridData.FILL_HORIZONTAL, -1); if (!selector) { UIUtils.createLabelText(placeholder, "Name", transformer.getName(), SWT.READ_ONLY); } else { transformerCombo = UIUtils.createLabelCombo(placeholder, "Name", SWT.DROP_DOWN | SWT.READ_ONLY); transformerCombo.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); transformerCombo.add(ResultSetViewer.EMPTY_TRANSFORMER_NAME); transformerCombo.select(0); transformerCombo.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { int selectionIndex = transformerCombo.getSelectionIndex(); if (selectionIndex == 0) { transformer = null; infoText.setText("N/A"); loadTransformerSettings(Collections.emptyList()); } else { transformer = transformerList.get(selectionIndex - 1); infoText.setText(CommonUtils.notEmpty(transformer.getDescription())); loadTransformerSettings(transformer.getProperties()); } updateAttributeSelection(); composite.layout(true, true); } }); } Label infoLabel = UIUtils.createControlLabel(settingsPanel, "Info"); infoLabel.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_BEGINNING)); infoText = new Text(settingsPanel, SWT.READ_ONLY | SWT.WRAP); GridData gd = new GridData(GridData.FILL_HORIZONTAL); gd.widthHint = 300; infoText.setLayoutData(gd); } propertiesEditor = new PropertyTreeViewer(settingsPanel, SWT.BORDER); propertiesEditor.getControl().setFocus(); } private void loadTransformerSettings(Collection<? extends DBPPropertyDescriptor> properties) { Map<String, Object> transformOptions = settings == null ? null : settings.getTransformOptions(); if (transformOptions == null) { transformOptions = Collections.emptyMap(); } propertySource = new PropertySourceCustom( properties, transformOptions); propertiesEditor.loadProperties(propertySource); } @Override protected void createButtonsForButtonBar(Composite parent) { createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } @Override protected void okPressed() { saveTransformerSettings(); vEntitySrc.copyFrom(vEntity); vEntitySrc.persistConfiguration(); super.okPressed(); } }
#6696 Column transformers for custom SQL queries

Former-commit-id: a59085f81e6cb82f2c8e079c2d9061a5b2888cb0
plugins/org.jkiss.dbeaver.ui.editors.data/src/org/jkiss/dbeaver/ui/controls/resultset/TransformerSettingsDialog.java
#6696 Column transformers for custom SQL queries
Java
bsd-2-clause
bc5532974f3455d4dbd6a268b3ecee425de7b0a0
0
bliksemlabs/bliksemintegration-realtime
/**
 * Copyright (C) 2012 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.transit.realtime;

import com.google.protobuf.ExtensionRegistry;

/**
 * Support for GTFS-realtime extensions.
 *
 * @author bdferris
 *
 */
public class GtfsRealtimeExtensions {

  /**
   * Adds all known GTFS-realtime extension messages to the specified extension
   * registry.
   *
   * @param registry
   */
  public static void registerExtensions(ExtensionRegistry registry) {
    registry.add(GtfsRealtimeNYCT.nyctFeedHeader);
    registry.add(GtfsRealtimeNYCT.nyctStopTimeUpdate);
    registry.add(GtfsRealtimeNYCT.nyctTripDescriptor);
    registry.add(GtfsRealtimeOneBusAway.obaFeedHeader);
    registry.add(GtfsRealtimeOneBusAway.obaFeedEntity);
    registry.add(GtfsRealtimeOneBusAway.obaTripUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiVehiclePosition);
    registry.add(GtfsRealtimeOVapi.ovapiTripUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiStopTimeUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiVehicleDescriptor);
    registry.add(GtfsRealtimeOVapi.ovapiTripdescriptor);
  }
}
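A usage sketch for the registry above. It assumes the standard GTFS-realtime protobuf bindings (GtfsRealtime.FeedMessage) are on the classpath, that the feed URL is supplied as a program argument, and that ovapiTripdescriptor is declared as an extension of TripDescriptor, as its name suggests.

import java.io.InputStream;
import java.net.URL;

import com.google.protobuf.ExtensionRegistry;
import com.google.transit.realtime.GtfsRealtime;
import com.google.transit.realtime.GtfsRealtime.FeedMessage;
import com.google.transit.realtime.GtfsRealtimeExtensions;
import com.google.transit.realtime.GtfsRealtimeOVapi;

public class FeedWithExtensionsSketch {

    public static void main(String[] args) throws Exception {
        // Register the known extensions once, then hand the registry to the parser
        // so extension fields are decoded instead of being kept as unknown fields.
        ExtensionRegistry registry = ExtensionRegistry.newInstance();
        GtfsRealtimeExtensions.registerExtensions(registry);

        try (InputStream in = new URL(args[0]).openStream()) {
            FeedMessage feed = FeedMessage.parseFrom(in, registry);
            for (GtfsRealtime.FeedEntity entity : feed.getEntityList()) {
                if (entity.hasTripUpdate()) {
                    GtfsRealtime.TripDescriptor trip = entity.getTripUpdate().getTrip();
                    // Assumed: the OVapi extension is declared on TripDescriptor.
                    if (trip.hasExtension(GtfsRealtimeOVapi.ovapiTripdescriptor)) {
                        System.out.println(trip.getExtension(GtfsRealtimeOVapi.ovapiTripdescriptor));
                    }
                }
            }
        }
    }
}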
src/main/java/com/google/transit/realtime/GtfsRealtimeExtensions.java
/**
 * Copyright (C) 2012 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.transit.realtime;

import com.google.protobuf.ExtensionRegistry;

/**
 * Support for GTFS-realtime extensions.
 *
 * @author bdferris
 */
public class GtfsRealtimeExtensions {

  /**
   * Adds all known GTFS-realtime extension messages to the specified extension
   * registry.
   *
   * @param registry
   */
  public static void registerExtensions(ExtensionRegistry registry) {
    registry.add(GtfsRealtimeNYCT.nyctFeedHeader);
    registry.add(GtfsRealtimeNYCT.nyctStopTimeUpdate);
    registry.add(GtfsRealtimeNYCT.nyctTripDescriptor);
    registry.add(GtfsRealtimeOneBusAway.obaFeedHeader);
    registry.add(GtfsRealtimeOneBusAway.obaFeedEntity);
    registry.add(GtfsRealtimeOneBusAway.obaTripUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiVehiclePosition);
    registry.add(GtfsRealtimeOVapi.ovapiTripUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiStopTimeUpdate);
    registry.add(GtfsRealtimeOVapi.ovapiVehicleDescriptor);
  }
}
Register ovapi tripdescriptor extension
src/main/java/com/google/transit/realtime/GtfsRealtimeExtensions.java
Register ovapi tripdescriptor extension
Java
mit
b4a0756ad85ad0118783abb18665852e90e4ecdf
0
conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5
package com.conveyal.r5.analyst; import com.beust.jcommander.ParameterException; import com.conveyal.r5.util.InputStreamProvider; import com.csvreader.CsvReader; import gnu.trove.list.TIntList; import gnu.trove.list.array.TIntArrayList; import org.locationtech.jts.geom.Envelope; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Arrays; import static com.conveyal.r5.streets.VertexStore.fixedDegreesToFloating; /** * These are points serving as origins or destinations in an accessibility analysis which are not constrained to * a regular grid. Each point has an arbitrary latitude and longitude attached to it. * This class re-uses some of the legacy code, which was removed in R5 PR #338. */ public class FreeFormPointSet extends PointSet { private static final Logger LOG = LoggerFactory.getLogger(FreeFormPointSet.class); /** A unique identifier for each feature. */ private final String[] ids; /** The latitude of each point. */ private final double[] lats; /** The longitude of each point. */ private final double[] lons; /** The number of opportunities located at each point. */ private final double[] counts; // TODO check that all identifiers are unique /** * Create a FreeFormPointset from a CSV file, which must have latitude and longitude columns with the values of * latField and lonField in the header row. If idField is supplied, its column will be used to supply id values * for the points; if not, row numbers will be used as the ids. */ public static FreeFormPointSet fromCsv ( InputStreamProvider csvInputStreamProvider, String latField, String lonField, String idField, String countField ) throws IOException { /* First, scan through the file to count lines and check for rows with the wrong number of columns. */ int nRecs; int latCol = -1; int lonCol = -1; int idCol = -1; int countCol = -1; try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) { CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8); reader.readHeaders(); int nCols = reader.getHeaderCount(); for (int c = 0; c < nCols; c++) { String header = reader.getHeader(c); if (header.equals(latField)) { latCol = c; } else if (header.equalsIgnoreCase(lonField)) { lonCol = c; } else if (header.equalsIgnoreCase(idField)) { idCol = c; } else if (header.equalsIgnoreCase(countField)) { countCol = c; } } if (latCol < 0 || lonCol < 0) { throw new ParameterException("CSV file did not contain the specified latitude or longitude column."); } if (idField != null && idCol < 0) { throw new ParameterException("CSV file did not contain the specified ID column."); } if (countField != null && countCol < 0) { throw new ParameterException("CSV file did not contain the specified opportunity count column."); } while (reader.readRecord()) { if (reader.getColumnCount() != nCols) { String message = String.format( "CSV header has %d fields, record %d has %d fields.", nCols, reader.getCurrentRecord(), reader.getColumnCount() ); throw new ParameterException(message); } } // getCurrentRecord is zero-based and does not include headers or blank lines // FIXME isn't this creating one record too many, and leaving it blank? Verify. nRecs = (int) reader.getCurrentRecord() + 1; } /* If we reached here, the file is entirely readable. Re-read it from the beginning and record values. 
*/ // Note that we're doing two passes just so we know the array size. We could just use TIntLists. int rec = -1; try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) { CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8); FreeFormPointSet ret = new FreeFormPointSet(nRecs); ret.name = countField != null ? countField : "[COUNT]"; reader.readHeaders(); while (reader.readRecord()) { rec = (int) reader.getCurrentRecord(); ret.lats[rec] = Double.parseDouble(reader.get(latCol)); ret.lons[rec] = Double.parseDouble(reader.get(lonCol)); // If ID column was specified and present, use it. Otherwise, use record number as ID. ret.ids[rec] = idCol < 0 ? String.valueOf(rec) : reader.get(idCol); // If count column was specified and present, use it. Otherwise, one opportunity per point. ret.counts[rec] = countCol < 0 ? 1D : Double.parseDouble(reader.get(countCol)); } Grid.checkWgsEnvelopeSize(ret.getWgsEnvelope()); return ret; } catch (NumberFormatException nfe) { throw new ParameterException( String.format("Improperly formatted floating point value on line %d of CSV input", rec) ); } } /** * @param capacity expected number of features to be added to this FreeFormPointSet. */ private FreeFormPointSet(int capacity) { ids = new String[capacity]; lats = new double[capacity]; lons = new double[capacity]; counts = new double[capacity]; } @Override public int featureCount() { return ids.length; } @Override public double sumTotalOpportunities () { // For now we always have one opportunity per point. return featureCount(); } @Override public double getLat(int i) { return lats[i]; } @Override public double getLon(int i) { return lons[i]; } /** * Write coordinates for these points, in binary format. * Note that this does not save any opportunity magnitudes or densities. We do not use those yet. * Note also that if we ever intend to use these directly in the UI we should switch to a * fixed-width little-endian representation or JSON. */ public void write (OutputStream outputStream) throws IOException { DataOutputStream out = new DataOutputStream(outputStream); // Header // TODO add identifier / version for future sanity checking? // Should name and description be here or in Mongo metadata? 
out.writeInt(ids.length); for (String id : ids) { out.writeUTF(id); } for (double lat : lats) { out.writeDouble(lat); } for (double lon : lons) { out.writeDouble(lon); } for (double count : counts) { out.writeDouble(count); } out.close(); } public FreeFormPointSet (InputStream inputStream) throws IOException { DataInputStream data = new DataInputStream(inputStream); int nPoints = data.readInt(); this.ids = new String[nPoints]; this.lats = new double[nPoints]; this.lons = new double[nPoints]; this.counts = new double[nPoints]; for (int i = 0; i < nPoints; i++) { ids[i] = data.readUTF(); } for (int i = 0; i < nPoints; i++) { lats[i] = data.readDouble(); } for (int i = 0; i < nPoints; i++) { lons[i] = data.readDouble(); } for (int i = 0; i < nPoints; i++) { counts[i] = data.readDouble(); } data.close(); } @Override public TIntList getPointsInEnvelope (Envelope envelopeFixedDegrees) { // Convert fixed-degree envelope to floating double west = fixedDegreesToFloating(envelopeFixedDegrees.getMinX()); double east = fixedDegreesToFloating(envelopeFixedDegrees.getMaxX()); double north = fixedDegreesToFloating(envelopeFixedDegrees.getMaxY()); double south = fixedDegreesToFloating(envelopeFixedDegrees.getMinY()); TIntList pointsInEnvelope = new TIntArrayList(); // Pixels are truncated toward zero, and coords increase toward East and South in web Mercator, so <= south/east. for (int i = 0; i < lats.length; i++) { if (lats[i] < north && lats[i] > south && lons[i] < east && lons[i] > west) pointsInEnvelope.add(i); } return pointsInEnvelope; } @Override public double getOpportunityCount (int i) { // For now, these points do not have attached opportunity counts. // We consider them to all have a count of 1. return 1D; } @Override public String getId (int i) { return ids[i]; } @Override public Envelope getWgsEnvelope () { if (lats.length == 1 || lons.length == 0) { LOG.error("Attempt to create envelope from empty lat/lon array."); return null; } double minLat = Arrays.stream(lats).min().getAsDouble(); double minLon = Arrays.stream(lons).min().getAsDouble(); double maxLat = Arrays.stream(lats).max().getAsDouble(); double maxLon = Arrays.stream(lons).max().getAsDouble(); Envelope envelope = new Envelope(minLon, maxLon, minLat, maxLat); return envelope; } @Override public WebMercatorExtents getWebMercatorExtents () { final int DEFAULT_ZOOM = 9; Envelope wgsEnvelope = this.getWgsEnvelope(); WebMercatorExtents webMercatorExtents = WebMercatorExtents.forWgsEnvelope(wgsEnvelope, DEFAULT_ZOOM); return webMercatorExtents; } }
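A small end-to-end sketch for fromCsv() above. The CSV content and column names are made up, and it assumes InputStreamProvider exposes getInputStream() as its single abstract method, since that is the only method the parser uses.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import com.conveyal.r5.analyst.FreeFormPointSet;
import com.conveyal.r5.util.InputStreamProvider;

public class FreeFormPointSetCsvSketch {

    public static void main(String[] args) throws Exception {
        // A tiny in-memory CSV with the four columns referenced by fromCsv().
        String csv = "id,lat,lon,jobs\n"
                   + "a,52.37,4.90,10\n"
                   + "b,52.09,5.12,3\n";

        // Assumption: InputStreamProvider is a functional interface over getInputStream().
        // A fresh stream is returned on each call, matching the two-pass read in fromCsv().
        InputStreamProvider provider =
                () -> new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8));

        FreeFormPointSet points = FreeFormPointSet.fromCsv(provider, "lat", "lon", "id", "jobs");
        System.out.println(points.featureCount() + " points loaded");
    }
}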
src/main/java/com/conveyal/r5/analyst/FreeFormPointSet.java
package com.conveyal.r5.analyst; import com.beust.jcommander.ParameterException; import com.conveyal.r5.util.InputStreamProvider; import com.csvreader.CsvReader; import gnu.trove.list.TIntList; import gnu.trove.list.array.TIntArrayList; import org.locationtech.jts.geom.Envelope; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.Arrays; import static com.conveyal.r5.streets.VertexStore.fixedDegreesToFloating; /** * These are points serving as origins or destinations in an accessibility analysis which are not constrained to * a regular grid. Each point has an arbitrary latitude and longitude attached to it. * This class re-uses some of the legacy code, which was removed in R5 PR #338. */ public class FreeFormPointSet extends PointSet { private static final Logger LOG = LoggerFactory.getLogger(FreeFormPointSet.class); /** A unique identifier for each feature. */ private final String[] ids; /** The latitude of each point. */ private final double[] lats; /** The longitude of each point. */ private final double[] lons; /** The number of opportunities located at each point. */ private final double[] counts; // TODO check that all identifiers are unique /** * Create a FreeFormPointset from a CSV file, which must have latitude and longitude columns with the values of * latField and lonField in the header row. If idField is supplied, its column will be used to supply id values * for the points; if not, row numbers will be used as the ids. */ public static FreeFormPointSet fromCsv ( InputStreamProvider csvInputStreamProvider, String latField, String lonField, String idField, String countField ) throws IOException { /* First, scan through the file to count lines and check for rows with the wrong number of columns. */ int nRecs; int latCol = -1; int lonCol = -1; int idCol = -1; int countCol = -1; try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) { CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8); reader.readHeaders(); int nCols = reader.getHeaderCount(); for (int c = 0; c < nCols; c++) { String header = reader.getHeader(c); if (header.equals(latField)) { latCol = c; } else if (header.equalsIgnoreCase(lonField)) { lonCol = c; } else if (header.equalsIgnoreCase(idField)) { idCol = c; } else if (header.equalsIgnoreCase(countField)) { countCol = c; } } if (latCol < 0 || lonCol < 0) { throw new ParameterException("CSV file did not contain the specified latitude or longitude column."); } if (idField != null && idCol < 0) { throw new ParameterException("CSV file did not contain the specified ID column."); } if (idField != null && idCol < 0) { throw new ParameterException("CSV file did not contain the specified opportunity count column."); } while (reader.readRecord()) { if (reader.getColumnCount() != nCols) { String message = String.format( "CSV header has %d fields, record %d has %d fields.", nCols, reader.getCurrentRecord(), reader.getColumnCount() ); throw new ParameterException(message); } } // getCurrentRecord is zero-based and does not include headers or blank lines // FIXME isn't this creating one record too many, and leaving it blank? Verify. nRecs = (int) reader.getCurrentRecord() + 1; } /* If we reached here, the file is entirely readable. Re-read it from the beginning and record values. 
*/ // Note that we're doing two passes just so we know the array size. We could just use TIntLists. int rec = -1; try (InputStream csvInputStream = csvInputStreamProvider.getInputStream()) { CsvReader reader = new CsvReader(csvInputStream, ',', StandardCharsets.UTF_8); FreeFormPointSet ret = new FreeFormPointSet(nRecs); ret.name = countField != null ? countField : "[COUNT]"; reader.readHeaders(); while (reader.readRecord()) { rec = (int) reader.getCurrentRecord(); ret.lats[rec] = Double.parseDouble(reader.get(latCol)); ret.lons[rec] = Double.parseDouble(reader.get(lonCol)); // If ID column was specified and present, use it. Otherwise, use record number as ID. ret.ids[rec] = idCol < 0 ? String.valueOf(rec) : reader.get(idCol); // If count column was specified and present, use it. Otherwise, one opportunity per point. ret.counts[rec] = countCol < 0 ? 1D : Double.parseDouble(reader.get(countCol)); } Grid.checkWgsEnvelopeSize(ret.getWgsEnvelope()); return ret; } catch (NumberFormatException nfe) { throw new ParameterException( String.format("Improperly formatted floating point value on line %d of CSV input", rec) ); } } /** * @param capacity expected number of features to be added to this FreeFormPointSet. */ private FreeFormPointSet(int capacity) { ids = new String[capacity]; lats = new double[capacity]; lons = new double[capacity]; counts = new double[capacity]; } @Override public int featureCount() { return ids.length; } @Override public double sumTotalOpportunities () { // For now we always have one opportunity per point. return featureCount(); } @Override public double getLat(int i) { return lats[i]; } @Override public double getLon(int i) { return lons[i]; } /** * Write coordinates for these points, in binary format. * Note that this does not save any opportunity magnitudes or densities. We do not use those yet. * Note also that if we ever intend to use these directly in the UI we should switch to a * fixed-width little-endian representation or JSON. */ public void write (OutputStream outputStream) throws IOException { DataOutputStream out = new DataOutputStream(outputStream); // Header // TODO add identifier / version for future sanity checking? // Should name and description be here or in Mongo metadata? 
out.writeInt(ids.length); for (String id : ids) { out.writeUTF(id); } for (double lat : lats) { out.writeDouble(lat); } for (double lon : lons) { out.writeDouble(lon); } for (double count : counts) { out.writeDouble(count); } out.close(); } public FreeFormPointSet (InputStream inputStream) throws IOException { DataInputStream data = new DataInputStream(inputStream); int nPoints = data.readInt(); this.ids = new String[nPoints]; this.lats = new double[nPoints]; this.lons = new double[nPoints]; this.counts = new double[nPoints]; for (int i = 0; i < nPoints; i++) { ids[i] = data.readUTF(); } for (int i = 0; i < nPoints; i++) { lats[i] = data.readDouble(); } for (int i = 0; i < nPoints; i++) { lons[i] = data.readDouble(); } for (int i = 0; i < nPoints; i++) { counts[i] = data.readDouble(); } data.close(); } @Override public TIntList getPointsInEnvelope (Envelope envelopeFixedDegrees) { // Convert fixed-degree envelope to floating double west = fixedDegreesToFloating(envelopeFixedDegrees.getMinX()); double east = fixedDegreesToFloating(envelopeFixedDegrees.getMaxX()); double north = fixedDegreesToFloating(envelopeFixedDegrees.getMaxY()); double south = fixedDegreesToFloating(envelopeFixedDegrees.getMinY()); TIntList pointsInEnvelope = new TIntArrayList(); // Pixels are truncated toward zero, and coords increase toward East and South in web Mercator, so <= south/east. for (int i = 0; i < lats.length; i++) { if (lats[i] < north && lats[i] > south && lons[i] < east && lons[i] > west) pointsInEnvelope.add(i); } return pointsInEnvelope; } @Override public double getOpportunityCount (int i) { // For now, these points do not have attached opportunity counts. // We consider them to all have a count of 1. return 1D; } @Override public String getId (int i) { return ids[i]; } @Override public Envelope getWgsEnvelope () { if (lats.length == 1 || lons.length == 0) { LOG.error("Attempt to create envelope from empty lat/lon array."); return null; } double minLat = Arrays.stream(lats).min().getAsDouble(); double minLon = Arrays.stream(lons).min().getAsDouble(); double maxLat = Arrays.stream(lats).max().getAsDouble(); double maxLon = Arrays.stream(lons).max().getAsDouble(); Envelope envelope = new Envelope(minLon, maxLon, minLat, maxLat); return envelope; } @Override public WebMercatorExtents getWebMercatorExtents () { final int DEFAULT_ZOOM = 9; Envelope wgsEnvelope = this.getWgsEnvelope(); WebMercatorExtents webMercatorExtents = WebMercatorExtents.forWgsEnvelope(wgsEnvelope, DEFAULT_ZOOM); return webMercatorExtents; } }
Correctly check count field

Probably a copy/paste error where the fields were not updated.
src/main/java/com/conveyal/r5/analyst/FreeFormPointSet.java
Correctly check count field
Java
mit
af7cee90d21f4859bde0f7f86d443db992eba9c9
0
gmurray/protorabbit,gmurray/protorabbit
package org.protorabbit.stringtemplate; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import org.antlr.stringtemplate.StringTemplate; import org.protorabbit.model.IContext; import org.protorabbit.model.IEngine; import org.protorabbit.model.ITemplate; import org.protorabbit.model.impl.ResourceURI; import org.protorabbit.util.IOUtil; public class StringTemplateEngine implements IEngine { private static Logger logger = null; static final Logger getLogger() { if ( logger == null ) { logger = Logger.getLogger( "org.protrabbit" ); } return logger; } /* * Render a template with the id tid, context, and given outputsteam */ public void renderTemplate( String tid, IContext ctx, OutputStream out) { ITemplate t = ctx.getConfig().getTemplate( tid, ctx ); renderTemplate( t, ctx, out ); } public static void renderTemplate( ITemplate t, IContext ctx, OutputStream out ) { renderTemplate( t, ctx, out, true ); } public static Map<Object,Object> getMetaData( ITemplate t, IContext ctx ) { return renderTemplate( t, ctx, null, true ); } @SuppressWarnings("unchecked") public static Map<Object,Object> renderTemplate( ITemplate t, IContext ctx, OutputStream out, boolean getMetaData ) { Map<Object,Object> metaData = null; ResourceURI uri = t.getTemplateURI(ctx); String baseURI = null; if ( uri != null) { baseURI = uri.getURI(); } else { baseURI = t.getId(); } if ( baseURI == null ) { throw new RuntimeException( "A templateId or templateURI is required for a tempalte to be rendered." ); } String prefix = ""; String baseTemplate = null; if ( t.getDocumentContext() != null ) { int lastPath = baseURI.lastIndexOf("/"); if ( lastPath != -1 ) { prefix = baseURI.substring(0, lastPath + 1 ); baseTemplate = baseURI.substring( lastPath + 1 ); } else { baseTemplate = baseURI; } } else { int lastPath = baseURI.lastIndexOf("/"); if ( lastPath != -1 ) { prefix = baseURI.substring(0, lastPath + 1 ); baseTemplate = baseURI.substring( lastPath + 1 ); } else { baseTemplate = baseURI; } } if ( baseTemplate.endsWith(".st") ) { baseTemplate = baseTemplate.substring(0, baseTemplate.length() - 3 ); } StringTemplate st2 = null; STGroupDynamic group = new STGroupDynamic( ctx, prefix ); if ( t.getDocumentContext() != null ) { if ( t.getDocumentContext().getDocument() != null ) { st2 = group.loadTemplate( t.getId(), t.getDocumentContext().getDocument() ); } else { getLogger().log( Level.SEVERE, "Given a empty document with id " + t.getId() ); } } else { st2 = group.loadTemplate( baseTemplate ); } if ( st2 != null) { // copy in the ctx props Set<String> set = ctx.getAttributes().keySet(); Iterator<String> it = set.iterator(); Map<String,Object> atts = new HashMap<String,Object>(); while ( it.hasNext() ) { String key = it.next(); if ( !key.startsWith("org.protorabbit.")) { atts.put( key , ctx.getAttribute(key) ); } } st2.setAttributes( atts ); String result = st2.toString(); // get the meta data if ( getMetaData ) { metaData = st2.getPostProcessMetaData(); Set<String> templates = st2.getGroup().getTemplateNames(); Iterator<String> myIt = templates.iterator(); Map<String,Map<String,Object>> subtemplates = new HashMap<String,Map<String,Object>>(); metaData.put("subTemplates", subtemplates ); while ( myIt.hasNext() ) { String templateName = myIt.next(); if ( templateName.equals( st2.getName() )) { // skip the top level continue; } 
StringTemplate _st = st2.getGroup().getInstanceOf( templateName ); if ( _st != null ) { // mixin attribute set _st.setAttributes( atts ); _st.toString(); Map<String, Object> _meta = _st.getPostProcessMetaData() ; subtemplates.put( templateName, _meta ); } } } if ( out != null ) { ByteArrayInputStream bis = new ByteArrayInputStream( result.getBytes() ); try { IOUtil.writeBinaryResource( bis, out ); } catch (IOException e) { getLogger().log( Level.SEVERE, "Error rendering template " + t.getId(), e ); } } } else { getLogger().log( Level.WARNING, "Could not find template " + t.getId() ); } return metaData; } }
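For reference, the engine above ultimately copies context attributes into a StringTemplate and renders it with toString(). A minimal standalone StringTemplate 3.x example of that mechanism, independent of protorabbit's IContext/ITemplate plumbing, is sketched below.

import org.antlr.stringtemplate.StringTemplate;

public class StringTemplateBasicsSketch {

    public static void main(String[] args) {
        // Same mechanism the engine relies on: set attributes, then render via toString().
        StringTemplate st = new StringTemplate("Hello $name$, you have $count$ messages.");
        st.setAttribute("name", "world");
        st.setAttribute("count", 3);
        System.out.println(st.toString());
    }
}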
src/org/protorabbit/stringtemplate/StringTemplateEngine.java
package org.protorabbit.stringtemplate; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import org.antlr.stringtemplate.StringTemplate; import org.protorabbit.model.IContext; import org.protorabbit.model.IEngine; import org.protorabbit.model.ITemplate; import org.protorabbit.model.impl.ResourceURI; import org.protorabbit.util.IOUtil; public class StringTemplateEngine implements IEngine { private static Logger logger = null; static final Logger getLogger() { if ( logger == null ) { logger = Logger.getLogger( "org.protrabbit" ); } return logger; } /* * Render a template with the id tid, context, and given outputsteam */ public void renderTemplate( String tid, IContext ctx, OutputStream out) { ITemplate t = ctx.getConfig().getTemplate( tid, ctx ); renderTemplate( t, ctx, out ); } public static void renderTemplate( ITemplate t, IContext ctx, OutputStream out ) { renderTemplate( t, ctx, out, true ); } public static Map<Object,Object> getMetaData( ITemplate t, IContext ctx ) { return renderTemplate( t, ctx, null, true ); } @SuppressWarnings("unchecked") public static Map<Object,Object> renderTemplate( ITemplate t, IContext ctx, OutputStream out, boolean getMetaData ) { Map<Object,Object> metaData = null; ResourceURI uri = t.getTemplateURI(ctx); String baseURI = null; if ( uri != null) { baseURI = uri.getURI(); } else { baseURI = t.getId(); } String prefix = ""; String baseTemplate = null; if ( t.getDocumentContext() != null ) { int lastPath = baseURI.lastIndexOf("/"); if ( lastPath != -1 ) { prefix = baseURI.substring(0, lastPath + 1 ); baseTemplate = baseURI.substring( lastPath + 1 ); } else { baseTemplate = baseURI; } } else { int lastPath = baseURI.lastIndexOf("/"); if ( lastPath != -1 ) { prefix = baseURI.substring(0, lastPath + 1 ); baseTemplate = baseURI.substring( lastPath + 1 ); } else { baseTemplate = baseURI; } } if ( baseTemplate.endsWith(".st") ) { baseTemplate = baseTemplate.substring(0, baseTemplate.length() - 3 ); } StringTemplate st2 = null; STGroupDynamic group = new STGroupDynamic( ctx, prefix ); if ( t.getDocumentContext() != null ) { if ( t.getDocumentContext().getDocument() != null ) { st2 = group.loadTemplate( t.getId(), t.getDocumentContext().getDocument() ); } else { getLogger().log( Level.SEVERE, "Given a empty document with id " + t.getId() ); } } else { st2 = group.loadTemplate( baseTemplate ); } if ( st2 != null) { // copy in the ctx props Set<String> set = ctx.getAttributes().keySet(); Iterator<String> it = set.iterator(); Map<String,Object> atts = new HashMap<String,Object>(); while ( it.hasNext() ) { String key = it.next(); if ( !key.startsWith("org.protorabbit.")) { atts.put( key , ctx.getAttribute(key) ); } } st2.setAttributes( atts ); String result = st2.toString(); // get the meta data if ( getMetaData ) { metaData = st2.getPostProcessMetaData(); Set<String> templates = st2.getGroup().getTemplateNames(); Iterator<String> myIt = templates.iterator(); Map<String,Map<String,Object>> subtemplates = new HashMap<String,Map<String,Object>>(); metaData.put("subTemplates", subtemplates ); while ( myIt.hasNext() ) { String templateName = myIt.next(); if ( templateName.equals( st2.getName() )) { // skip the top level continue; } StringTemplate _st = st2.getGroup().getInstanceOf( templateName ); // mixin attribute set _st.setAttributes( atts ); _st.toString(); 
Map<String, Object> _meta = _st.getPostProcessMetaData() ; subtemplates.put( templateName, _meta ); } } if ( out != null ) { ByteArrayInputStream bis = new ByteArrayInputStream( result.getBytes() ); try { IOUtil.writeBinaryResource( bis, out ); } catch (IOException e) { getLogger().log( Level.SEVERE, "Error rendering template " + t.getId(), e ); } } } else { getLogger().log( Level.WARNING, "Could not find template " + t.getId() ); } return metaData; } }
latest and greatest.
src/org/protorabbit/stringtemplate/StringTemplateEngine.java
latest and greatest.
Java
mit
4bf4ccf0c41e86362b60d0f6d459c3a37ea7ceba
0
DDoS/SpongeVanilla,CodeKingdomsTeam/SpongeVanilla,kenzierocks/SpongeVanilla,kenzierocks/SpongeVanilla,CodeKingdomsTeam/SpongeVanilla,DDoS/SpongeVanilla
/* * License (MIT) * * Copyright (c) 2014-2015 Granite Team * * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the "Software"), to deal in the * Software without restriction, including without limitation the rights to use, copy, * modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, * and to permit persons to whom the Software is furnished to do so, subject to the * following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.granitepowered.granite; import com.github.kevinsawicki.http.HttpRequest; import com.google.common.base.Throwables; import com.google.inject.Guice; import com.google.inject.Injector; import javassist.ClassPool; import javassist.NotFoundException; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.granitepowered.granite.bytecode.BytecodeModifier; import org.granitepowered.granite.bytecode.classes.*; import org.granitepowered.granite.impl.GraniteServer; import org.granitepowered.granite.impl.event.state.GraniteConstructionEvent; import org.granitepowered.granite.impl.event.state.GraniteInitializationEvent; import org.granitepowered.granite.impl.event.state.GraniteLoadCompleteEvent; import org.granitepowered.granite.impl.event.state.GranitePostInitializationEvent; import org.granitepowered.granite.impl.event.state.GranitePreInitializationEvent; import org.granitepowered.granite.impl.guice.GraniteGuiceModule; import org.granitepowered.granite.impl.text.chat.GraniteChatType; import org.granitepowered.granite.impl.text.format.GraniteTextColor; import org.granitepowered.granite.mappings.Mappings; import org.granitepowered.granite.util.ReflectionUtils; import org.slf4j.LoggerFactory; import org.spongepowered.api.Game; import org.spongepowered.api.text.action.GraniteTextActionFactory; import org.spongepowered.api.text.action.TextActions; import org.spongepowered.api.text.chat.ChatTypes; import org.spongepowered.api.text.chat.GraniteChatTypeFactory; import org.spongepowered.api.text.format.GraniteTextFormatFactory; import org.spongepowered.api.text.format.TextColors; import org.spongepowered.api.text.format.TextStyle; import org.spongepowered.api.text.format.TextStyles; import org.spongepowered.api.text.message.GraniteMessageFactory; import org.spongepowered.api.text.message.Messages; import org.spongepowered.api.text.title.GraniteTitleFactory; import org.spongepowered.api.text.title.Titles; import org.spongepowered.api.text.translation.GraniteTranslationFactory; import org.spongepowered.api.text.translation.Translations; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.text.SimpleDateFormat; import java.util.Date; 
import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.jar.JarEntry; import java.util.jar.JarFile; public class GraniteStartupThread extends Thread { String[] args; BytecodeModifier modifier; String serverVersion = "UNKNOWN"; String apiVersion = "UNKNOWN"; String buildNumber = "UNKNOWN"; public GraniteStartupThread(String args[]) { this.args = args; this.setName("Granite Startup"); } public void run() { try { Properties versionProp = new Properties(); InputStream versionIn = java.lang.ClassLoader.getSystemClassLoader().getResourceAsStream("granite.version"); if (versionIn != null) { try { versionProp.load(versionIn); String server = versionProp.getProperty("server"); if (server != null) { serverVersion = server; } String api = versionProp.getProperty("api"); if (api != null) { apiVersion = api; } String build = versionProp.getProperty("build"); if (build != null && !build.equals("NA")) { buildNumber = build; } } catch (IOException ignored) { } finally { try { versionIn.close(); } catch (IOException ignored) { } } } Injector injector = Guice.createInjector(new GraniteGuiceModule()); Granite.instance = injector.getInstance(Granite.class); Granite.instance.version = serverVersion; Granite.instance.apiVersion = apiVersion; Granite.instance.logger = LoggerFactory.getLogger("Granite"); Granite.instance.serverConfig = new ServerConfig(); Granite.instance.classPool = ClassPool.getDefault(); Granite.instance.eventManager.post(new GraniteConstructionEvent()); Granite.instance.classesDir = new File("classes/"); Granite.instance.classesDir.mkdirs(); Granite.instance.createGson(); loadMinecraftToClassPool(); Mappings.load(); modifyBytecode(); loadClasses(); bootstrap(); Granite.instance.gameRegistry.register(); injectSpongeFields(); Granite.instance.pluginManager.loadPlugins(); Granite.instance.server = (GraniteServer) injector.getInstance(Game.class); Granite.instance.eventManager.post(new GranitePreInitializationEvent()); Granite.instance.eventManager.post(new GraniteInitializationEvent()); Granite.instance.eventManager.post(new GranitePostInitializationEvent()); Granite.instance.eventManager.post(new GraniteLoadCompleteEvent()); Granite.instance.getLogger() .info("Starting Granite version " + serverVersion + " build " + buildNumber + " implementing API version " + apiVersion + "..."); Date date = new Date(); String day = new SimpleDateFormat("dd").format(date); String month = new SimpleDateFormat("MM").format(date); String year = new SimpleDateFormat("yyyy").format(date); if (Objects.equals(day + month, "0101")) { Granite.instance.getLogger().info("HAPPY NEW YEAR!"); } if (Objects.equals(day + month, "2704")) { Granite.instance.getLogger().info("Happy Birthday matthijs2704!"); } if (Objects.equals(day + month, "2208")) { Granite.instance.getLogger().info("Happy Birthday Voltasalt!"); } if (Objects.equals(day + month, "0709")) { String start = "2014"; Granite.instance.getLogger() .info("Happy Birthday Granite! 
Granite is " + Integer.toString(Integer.parseInt(year) - Integer.parseInt(start)) + " today!"); } if (Objects.equals(day + month, "2310")) { Granite.instance.getLogger().info("Happy Birthday AzureusNation!"); } if (Objects.equals(day + month, "3110")) { Granite.instance.getLogger().info("Happy Halloween!"); } if (Objects.equals(day + month, "2412")) { Granite.instance.getLogger().info("Santa is getting ready!"); } if (Objects.equals(day + month, "2512")) { Granite.instance.getLogger().info("Merry Christmas/Happy Holidays!"); } if (Objects.equals(day + month, "3112")) { Granite.instance.getLogger().info("New Years Eve. Make way for " + Integer.toString(Integer.parseInt(year) + 1) + "!"); } } catch (Throwable t) { Granite.error("We did a REALLY BIG boo-boo :'(", t); } } private void loadClasses() { try { Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class); method.setAccessible(true); method.invoke(ClassLoader.getSystemClassLoader(), Granite.instance.getClassesDir().toURI().toURL()); modifier.post(); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | IOException e) { Throwables.propagate(e); } } private void injectSpongeFields() { Granite.instance.getLogger().info("Injecting Sponge fields"); injectConstant(Messages.class, "factory", new GraniteMessageFactory()); injectConstant(TextStyles.class, "factory", new GraniteTextFormatFactory()); injectConstant(TextActions.class, "factory", new GraniteTextActionFactory()); injectConstant(Translations.class, "factory", new GraniteTranslationFactory()); injectConstant(ChatTypes.class, "factory", new GraniteChatTypeFactory()); injectConstant(Titles.class, "factory", new GraniteTitleFactory()); injectEnumConstants(TextColors.class, GraniteTextColor.class); Map<String, TextStyle.Base> styles = new HashMap<>(); for (Map.Entry<String, TextStyle.Base> entry : GraniteTextFormatFactory.styles.entrySet()) { styles.put(entry.getKey().toUpperCase(), entry.getValue()); } injectConstants(TextStyles.class, styles); injectEnumConstants(ChatTypes.class, GraniteChatType.class); } private void injectEnumConstants(Class<?> destination, Class<? 
extends Enum> source) { for (Enum constant : source.getEnumConstants()) { injectConstant(destination, constant.name(), constant); } } private void injectConstants(Class<?> clazz, Map<String, ?> objects) { for (Map.Entry<String, ?> entry : objects.entrySet()) { injectConstant(clazz, entry.getKey(), entry.getValue()); } } private void injectConstant(Class<?> clazz, String name, Object value) { try { Field f = clazz.getDeclaredField(name); ReflectionUtils.forceAccessible(f); f.set(null, value); } catch (NoSuchFieldException | IllegalAccessException e) { Throwables.propagate(e); } } private void modifyBytecode() { File buildNumberFile = new File(Granite.instance.getClassesDir(), "buildnumber.txt"); try { modifier = new BytecodeModifier(); modifier.add(new CommandHandlerClass()); modifier.add(new DedicatedServerClass()); modifier.add(new EntityClass()); modifier.add(new EntityPlayerMPClass()); modifier.add(new ItemInWorldManagerClass()); modifier.add(new ItemStackClass()); modifier.add(new NetHandlerPlayServerClass()); modifier.add(new ServerConfigurationManagerClass()); modifier.add(new WorldProviderClass()); modifier.add(new InstantiatorClass()); if (buildNumber.equals("UNKNOWN") || !buildNumberFile.exists() || !Objects.equals(FileUtils.readFileToString(buildNumberFile), buildNumber)) { Granite.instance.getLogger().info("Modifying bytecode"); if (Granite.instance.classesDir.exists()) { File oldClassesDir = new File(Granite.instance.classesDir.getParentFile(), Granite.instance.classesDir.getName() + "_old"); FileUtils.moveDirectory(Granite.instance.classesDir, oldClassesDir); FileUtils.deleteDirectory(oldClassesDir); } try { JarFile file = new JarFile(Granite.instance.getServerConfig().getMinecraftJar()); Enumeration<JarEntry> entries = file.entries(); while (entries.hasMoreElements()) { JarEntry entry = entries.nextElement(); File f = new File(Granite.instance.getClassesDir() + java.io.File.separator + entry.getName()); if (entry.isDirectory()) { f.mkdirs(); } else { FileOutputStream os = new FileOutputStream(f); IOUtils.copy(file.getInputStream(entry), os); os.close(); } } modifier.modify(); FileUtils.write(buildNumberFile, buildNumber + ""); } catch (IOException e) { Throwables.propagate(e); } } else { Granite.instance.getLogger().info("Found pre-modified bytecode in classes/, loading that"); } } catch (IOException e) { Throwables.propagate(e); } } private void bootstrap() { Granite.instance.getLogger().info("Bootstrapping Minecraft"); Mappings.invokeStatic("Bootstrap", "func_151354_b"); } private void loadMinecraftToClassPool() { File minecraftJar = Granite.instance.getServerConfig().getMinecraftJar(); if (!minecraftJar.exists()) { Granite.instance.getLogger().warn("Could not find Minecraft .jar, downloading"); HttpRequest req = HttpRequest.get("https://s3.amazonaws.com/Minecraft.Download/versions/1.8.1/minecraft_server.1.8.1.jar"); if (req.code() == 404) { throw new RuntimeException("Minecraft 404 error whilst trying to download"); } else if (req.code() == 200) { req.receive(minecraftJar); Granite.instance.getLogger().info("Minecraft Downloaded"); } } Granite.instance.getLogger().info("Loading " + minecraftJar.getName()); try { Granite.getInstance().classPool.insertClassPath(minecraftJar.getName()); } catch (NotFoundException e) { Throwables.propagate(e); } } }
src/main/java/org/granitepowered/granite/GraniteStartupThread.java
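A note on the Sponge field injection shown in the file above: injectSpongeFields() fills the static factory fields of the Sponge API classes (Messages, TextStyles, TextActions, and so on) by reflection, via injectConstant(). The snippet below is a minimal, self-contained sketch of that pattern only; the Holder class and its FACTORY field are hypothetical stand-ins, and plain Field.setAccessible(true) replaces Granite's ReflectionUtils.forceAccessible helper.

import java.lang.reflect.Field;

public class StaticInjectionSketch {

    // Hypothetical holder with a static factory field, standing in for Sponge classes such as Messages or TextStyles.
    static class Holder {
        private static Object FACTORY = null;
    }

    // Mirrors Granite's injectConstant(Class<?>, String, Object): look up a static field by name and overwrite it.
    static void injectConstant(Class<?> clazz, String name, Object value) throws ReflectiveOperationException {
        Field f = clazz.getDeclaredField(name);
        f.setAccessible(true);  // Granite calls ReflectionUtils.forceAccessible(f) here
        f.set(null, value);     // null target because the field is static
    }

    public static void main(String[] args) throws Exception {
        injectConstant(Holder.class, "FACTORY", "granite-factory-instance");
        System.out.println(Holder.FACTORY);  // prints the injected value
    }
}

The point of the pattern is that the API classes can ship with placeholder fields and the implementation overwrites them once, at startup, without the API depending on the implementation.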
/* * License (MIT) * * Copyright (c) 2014-2015 Granite Team * * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the "Software"), to deal in the * Software without restriction, including without limitation the rights to use, copy, * modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, * and to permit persons to whom the Software is furnished to do so, subject to the * following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package org.granitepowered.granite; import com.github.kevinsawicki.http.HttpRequest; import com.google.common.base.Throwables; import com.google.inject.Guice; import com.google.inject.Injector; import javassist.ClassPool; import javassist.NotFoundException; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.granitepowered.granite.bytecode.BytecodeModifier; import org.granitepowered.granite.bytecode.classes.CommandHandlerClass; import org.granitepowered.granite.bytecode.classes.DedicatedServerClass; import org.granitepowered.granite.bytecode.classes.EntityClass; import org.granitepowered.granite.bytecode.classes.EntityPlayerMPClass; import org.granitepowered.granite.bytecode.classes.ItemInWorldManagerClass; import org.granitepowered.granite.bytecode.classes.ItemStackClass; import org.granitepowered.granite.bytecode.classes.NetHandlerPlayServerClass; import org.granitepowered.granite.bytecode.classes.ServerConfigurationManagerClass; import org.granitepowered.granite.bytecode.classes.WorldProviderClass; import org.granitepowered.granite.impl.GraniteServer; import org.granitepowered.granite.impl.event.state.GraniteConstructionEvent; import org.granitepowered.granite.impl.event.state.GraniteInitializationEvent; import org.granitepowered.granite.impl.event.state.GraniteLoadCompleteEvent; import org.granitepowered.granite.impl.event.state.GranitePostInitializationEvent; import org.granitepowered.granite.impl.event.state.GranitePreInitializationEvent; import org.granitepowered.granite.impl.guice.GraniteGuiceModule; import org.granitepowered.granite.impl.text.chat.GraniteChatType; import org.granitepowered.granite.impl.text.format.GraniteTextColor; import org.granitepowered.granite.mappings.Mappings; import org.granitepowered.granite.util.ReflectionUtils; import org.slf4j.LoggerFactory; import org.spongepowered.api.Game; import org.spongepowered.api.text.action.GraniteTextActionFactory; import org.spongepowered.api.text.action.TextActions; import org.spongepowered.api.text.chat.ChatTypes; import org.spongepowered.api.text.chat.GraniteChatTypeFactory; import org.spongepowered.api.text.format.GraniteTextFormatFactory; import org.spongepowered.api.text.format.TextColors; import org.spongepowered.api.text.format.TextStyle; import org.spongepowered.api.text.format.TextStyles; import org.spongepowered.api.text.message.GraniteMessageFactory; import 
org.spongepowered.api.text.message.Messages; import org.spongepowered.api.text.title.GraniteTitleFactory; import org.spongepowered.api.text.title.Titles; import org.spongepowered.api.text.translation.GraniteTranslationFactory; import org.spongepowered.api.text.translation.Translations; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.jar.JarEntry; import java.util.jar.JarFile; public class GraniteStartupThread extends Thread { String[] args; BytecodeModifier modifier; String serverVersion = "UNKNOWN"; String apiVersion = "UNKNOWN"; String buildNumber = "UNKNOWN"; public GraniteStartupThread(String args[]) { this.args = args; this.setName("Granite Startup"); } public void run() { try { Properties versionProp = new Properties(); InputStream versionIn = java.lang.ClassLoader.getSystemClassLoader().getResourceAsStream("granite.version"); if (versionIn != null) { try { versionProp.load(versionIn); String server = versionProp.getProperty("server"); if (server != null) { serverVersion = server; } String api = versionProp.getProperty("api"); if (api != null) { apiVersion = api; } String build = versionProp.getProperty("build"); if (build != null && !build.equals("NA")) { buildNumber = build; } } catch (IOException ignored) { } finally { try { versionIn.close(); } catch (IOException ignored) { } } } Injector injector = Guice.createInjector(new GraniteGuiceModule()); Granite.instance = injector.getInstance(Granite.class); Granite.instance.version = serverVersion; Granite.instance.apiVersion = apiVersion; Granite.instance.logger = LoggerFactory.getLogger("Granite"); Granite.instance.serverConfig = new ServerConfig(); Granite.instance.classPool = ClassPool.getDefault(); Granite.instance.eventManager.post(new GraniteConstructionEvent()); Granite.instance.classesDir = new File("classes/"); Granite.instance.classesDir.mkdirs(); Granite.instance.createGson(); loadMinecraftToClassPool(); Mappings.load(); modifyBytecode(); loadClasses(); bootstrap(); Granite.instance.gameRegistry.register(); injectSpongeFields(); Granite.instance.pluginManager.loadPlugins(); Granite.instance.server = (GraniteServer) injector.getInstance(Game.class); Granite.instance.eventManager.post(new GranitePreInitializationEvent()); Granite.instance.eventManager.post(new GraniteInitializationEvent()); Granite.instance.eventManager.post(new GranitePostInitializationEvent()); Granite.instance.eventManager.post(new GraniteLoadCompleteEvent()); Granite.instance.getLogger() .info("Starting Granite version " + serverVersion + " build " + buildNumber + " implementing API version " + apiVersion + "..."); Date date = new Date(); String day = new SimpleDateFormat("dd").format(date); String month = new SimpleDateFormat("MM").format(date); String year = new SimpleDateFormat("yyyy").format(date); if (Objects.equals(day + month, "0101")) { Granite.instance.getLogger().info("HAPPY NEW YEAR!"); } if (Objects.equals(day + month, "2704")) { Granite.instance.getLogger().info("Happy Birthday matthijs2704!"); } if (Objects.equals(day + month, "2208")) { Granite.instance.getLogger().info("Happy Birthday Voltasalt!"); } if 
(Objects.equals(day + month, "0709")) { String start = "2014"; Granite.instance.getLogger() .info("Happy Birthday Granite! Granite is " + Integer.toString(Integer.parseInt(year) - Integer.parseInt(start)) + " today!"); } if (Objects.equals(day + month, "2310")) { Granite.instance.getLogger().info("Happy Birthday AzureusNation!"); } if (Objects.equals(day + month, "3110")) { Granite.instance.getLogger().info("Happy Halloween!"); } if (Objects.equals(day + month, "2412")) { Granite.instance.getLogger().info("Santa is getting ready!"); } if (Objects.equals(day + month, "2512")) { Granite.instance.getLogger().info("Merry Christmas/Happy Holidays!"); } if (Objects.equals(day + month, "3112")) { Granite.instance.getLogger().info("New Years Eve. Make way for " + Integer.toString(Integer.parseInt(year) + 1) + "!"); } } catch (Throwable t) { Granite.error("We did a REALLY BIG boo-boo :'(", t); } } private void loadClasses() { try { Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class); method.setAccessible(true); method.invoke(ClassLoader.getSystemClassLoader(), Granite.instance.getClassesDir().toURI().toURL()); modifier.post(); } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | IOException e) { Throwables.propagate(e); } } private void injectSpongeFields() { Granite.instance.getLogger().info("Injecting Sponge fields"); injectConstant(Messages.class, "factory", new GraniteMessageFactory()); injectConstant(TextStyles.class, "factory", new GraniteTextFormatFactory()); injectConstant(TextActions.class, "factory", new GraniteTextActionFactory()); injectConstant(Translations.class, "factory", new GraniteTranslationFactory()); injectConstant(ChatTypes.class, "factory", new GraniteChatTypeFactory()); injectConstant(Titles.class, "factory", new GraniteTitleFactory()); injectEnumConstants(TextColors.class, GraniteTextColor.class); Map<String, TextStyle.Base> styles = new HashMap<>(); for (Map.Entry<String, TextStyle.Base> entry : GraniteTextFormatFactory.styles.entrySet()) { styles.put(entry.getKey().toUpperCase(), entry.getValue()); } injectConstants(TextStyles.class, styles); injectEnumConstants(ChatTypes.class, GraniteChatType.class); } private void injectEnumConstants(Class<?> destination, Class<? 
extends Enum> source) { for (Enum constant : source.getEnumConstants()) { injectConstant(destination, constant.name(), constant); } } private void injectConstants(Class<?> clazz, Map<String, ?> objects) { for (Map.Entry<String, ?> entry : objects.entrySet()) { injectConstant(clazz, entry.getKey(), entry.getValue()); } } private void injectConstant(Class<?> clazz, String name, Object value) { try { Field f = clazz.getDeclaredField(name); ReflectionUtils.forceAccessible(f); f.set(null, value); } catch (NoSuchFieldException | IllegalAccessException e) { Throwables.propagate(e); } } private void modifyBytecode() { File buildNumberFile = new File(Granite.instance.getClassesDir(), "buildnumber.txt"); try { modifier = new BytecodeModifier(); modifier.add(new CommandHandlerClass()); modifier.add(new DedicatedServerClass()); modifier.add(new EntityClass()); modifier.add(new EntityPlayerMPClass()); modifier.add(new ItemInWorldManagerClass()); modifier.add(new ItemStackClass()); modifier.add(new NetHandlerPlayServerClass()); modifier.add(new ServerConfigurationManagerClass()); modifier.add(new WorldProviderClass()); modifier.add(new InstantiatorClass()); if (buildNumber.equals("UNKNOWN") || !buildNumberFile.exists() || !Objects.equals(FileUtils.readFileToString(buildNumberFile), buildNumber)) { Granite.instance.getLogger().info("Modifying bytecode"); if (Granite.instance.classesDir.exists()) { File oldClassesDir = new File(Granite.instance.classesDir.getParentFile(), Granite.instance.classesDir.getName() + "_old"); FileUtils.moveDirectory(Granite.instance.classesDir, oldClassesDir); FileUtils.deleteDirectory(oldClassesDir); } try { JarFile file = new JarFile(Granite.instance.getServerConfig().getMinecraftJar()); Enumeration<JarEntry> entries = file.entries(); while (entries.hasMoreElements()) { JarEntry entry = entries.nextElement(); File f = new File(Granite.instance.getClassesDir() + java.io.File.separator + entry.getName()); if (entry.isDirectory()) { f.mkdirs(); } else { FileOutputStream os = new FileOutputStream(f); IOUtils.copy(file.getInputStream(entry), os); os.close(); } } modifier.modify(); FileUtils.write(buildNumberFile, buildNumber + ""); } catch (IOException e) { Throwables.propagate(e); } } else { Granite.instance.getLogger().info("Found pre-modified bytecode in classes/, loading that"); } } catch (IOException e) { Throwables.propagate(e); } } private void bootstrap() { Granite.instance.getLogger().info("Bootstrapping Minecraft"); Mappings.invokeStatic("Bootstrap", "func_151354_b"); } private void loadMinecraftToClassPool() { File minecraftJar = Granite.instance.getServerConfig().getMinecraftJar(); if (!minecraftJar.exists()) { Granite.instance.getLogger().warn("Could not find Minecraft .jar, downloading"); HttpRequest req = HttpRequest.get("https://s3.amazonaws.com/Minecraft.Download/versions/1.8.1/minecraft_server.1.8.1.jar"); if (req.code() == 404) { throw new RuntimeException("Minecraft 404 error whilst trying to download"); } else if (req.code() == 200) { req.receive(minecraftJar); Granite.instance.getLogger().info("Minecraft Downloaded"); } } Granite.instance.getLogger().info("Loading " + minecraftJar.getName()); try { Granite.getInstance().classPool.insertClassPath(minecraftJar.getName()); } catch (NotFoundException e) { Throwables.propagate(e); } } }
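Similarly, loadClasses() in both versions above makes the patched classes/ directory visible to the running JVM by reflectively invoking URLClassLoader.addURL on the system class loader. Below is a minimal sketch of that trick, assuming an arbitrary directory path; it works only where the system class loader actually is a URLClassLoader (Java 8 and earlier).

import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;

public class AddUrlSketch {

    // Mirrors GraniteStartupThread.loadClasses(): append a directory to the system class path at runtime.
    static void addToSystemClassLoader(File dir) throws Exception {
        URL url = dir.toURI().toURL();
        Method addURL = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
        addURL.setAccessible(true);
        // Only valid where the system class loader is a URLClassLoader (Java 8 and earlier).
        addURL.invoke(ClassLoader.getSystemClassLoader(), url);
    }

    public static void main(String[] args) throws Exception {
        addToSystemClassLoader(new File("classes/"));  // "classes/" mirrors Granite's patched-class directory
        System.out.println("classes/ appended to the system class path");
    }
}

From Java 9 onward the application class loader is no longer a URLClassLoader, so this reflective call fails; the usual replacements are a dedicated child class loader or an instrumentation agent (Instrumentation.appendToSystemClassLoaderSearch).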
Fix building
src/main/java/org/granitepowered/granite/GraniteStartupThread.java
Fix building
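For reference, the entire functional difference between the old_contents and new_contents of this record lies in the import block: the old version imports each bytecode-modifier class individually and has no import for InstantiatorClass, even though modifyBytecode() registers new InstantiatorClass(), which is what broke the build. The "Fix building" commit collapses those imports into a wildcard so the new class resolves:

-import org.granitepowered.granite.bytecode.classes.CommandHandlerClass;
-import org.granitepowered.granite.bytecode.classes.DedicatedServerClass;
-import org.granitepowered.granite.bytecode.classes.EntityClass;
-import org.granitepowered.granite.bytecode.classes.EntityPlayerMPClass;
-import org.granitepowered.granite.bytecode.classes.ItemInWorldManagerClass;
-import org.granitepowered.granite.bytecode.classes.ItemStackClass;
-import org.granitepowered.granite.bytecode.classes.NetHandlerPlayServerClass;
-import org.granitepowered.granite.bytecode.classes.ServerConfigurationManagerClass;
-import org.granitepowered.granite.bytecode.classes.WorldProviderClass;
+import org.granitepowered.granite.bytecode.classes.*;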
Java
mit
18077356e212ea2ab5c4c4e08a5b10c98cc1f50d
0
yzhnasa/TASSEL-iRods,yzhnasa/TASSEL-iRods,yzhnasa/TASSEL-iRods,yzhnasa/TASSEL-iRods
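The record that follows holds TASSEL's TagsToSNPByAlignmentPlugin, whose site filter estimates an inbreeding coefficient F = 1 - Ho/He from the genotype calls at each candidate SNP (see calculateF in the code below, where expHets = 2 * MAF * (1 - MAF)). As a compact illustration of that arithmetic only, here is a sketch with made-up genotype counts; the class, method, and variable names are hypothetical stand-ins, not TASSEL API.

public class InbreedingCoefficientSketch {

    /**
     * F = 1 - Ho/He, where Ho is the observed proportion of heterozygous genotypes
     * and He = 2 * maf * (1 - maf) is the expected proportion under Hardy-Weinberg
     * equilibrium; this mirrors the arithmetic in TagsToSNPByAlignmentPlugin.calculateF().
     */
    static double inbreedingCoefficient(int homMajorCount, int homMinorCount, int hetCount, double maf) {
        double observedHet = (double) hetCount / (homMajorCount + homMinorCount + hetCount);
        double expectedHet = 2.0 * maf * (1.0 - maf);
        return 1.0 - (observedHet / expectedHet);
    }

    public static void main(String[] args) {
        // Made-up counts: 70 major-allele homozygotes, 20 minor-allele homozygotes, 10 heterozygotes.
        int homMaj = 70, homMin = 20, het = 10;
        double maf = (2.0 * homMin + het) / (2.0 * (homMaj + homMin + het));  // minor allele frequency = 0.25
        System.out.printf("MAF=%.3f F=%.3f%n", maf, inbreedingCoefficient(homMaj, homMin, het, maf));
    }
}

In the plugin itself this value is compared against the -mnF threshold, and sites whose observed F falls below it are rejected as likely paralog collapses or sequencing artifacts.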
/* * TagsToSNPByAlignmentPlugin */ package net.maizegenetics.gbs.pipeline; import cern.colt.GenericSorting; import cern.colt.Swapper; import cern.colt.function.IntComparator; import java.awt.Frame; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.util.Arrays; import java.util.HashMap; import javax.swing.ImageIcon; import net.maizegenetics.gbs.maps.TagsAtLocus; import net.maizegenetics.gbs.maps.TagsOnPhysicalMap; import net.maizegenetics.gbs.tagdist.TagsByTaxa; import net.maizegenetics.gbs.tagdist.TagsByTaxaBitFileMap; import net.maizegenetics.gbs.tagdist.TagsByTaxaByteFileMap; import net.maizegenetics.gbs.tagdist.TagsByTaxaByteHDF5TagGroups; import net.maizegenetics.util.ArgsEngine; import net.maizegenetics.gbs.util.BaseEncoder; import net.maizegenetics.pal.alignment.Alignment; import net.maizegenetics.pal.alignment.AlignmentUtils; import net.maizegenetics.pal.alignment.ExportUtils; import net.maizegenetics.pal.alignment.Locus; import net.maizegenetics.pal.alignment.MutableNucleotideAlignment; import net.maizegenetics.pal.alignment.MutableVCFAlignment; import net.maizegenetics.pal.alignment.NucleotideAlignmentConstants; import net.maizegenetics.pal.ids.IdGroup; import net.maizegenetics.pal.ids.SimpleIdGroup; import net.maizegenetics.plugindef.AbstractPlugin; import net.maizegenetics.plugindef.DataSet; import net.maizegenetics.util.Utils; import org.apache.log4j.Logger; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.SimpleLayout; import org.biojava3.core.util.ConcurrencyTools; /** * This class aligns tags at the same physical location against one another, * calls SNPs, and then outputs the SNPs to a HapMap file. * * It is multi-threaded, as there are substantial speed increases with it. 
* * @author edbuckler */ public class TagsToSNPByAlignmentPlugin extends AbstractPlugin { static int maxSize = 200000; //normally 200K; private double minF = -2.0, minMAF = 0.01; private int minMAC = 10; // static boolean ignoreTriallelic=false; private boolean inclRare = false; // false = only call the two most common alleles at a site private boolean inclGaps = false; // false = ignore sites where the major or the 1st minor alleles are gaps private boolean callBiallelicSNPsWithGap = false; // true = call sites with a biallelic SNP plus a gap (e.g., A/C/-) private boolean isUpdateTOPM = false; private boolean useTBTByte = false; static double defaultMinPropTaxaWithLocus = 0.1; private static Logger myLogger = Logger.getLogger(TagsToSNPByAlignmentPlugin.class); TagsOnPhysicalMap theTOPM = null; TagsByTaxa theTBT = null; File inputFile = null; private String inTOPMFile = null; private String outTOPMFile = null; private boolean usePedigree = false; HashMap<String, Double> taxaFs = null; boolean[] useTaxaForMinF = null; int nInbredTaxa = Integer.MIN_VALUE; String suppliedOutputFileName; boolean vcf = false; int startChr = Integer.MAX_VALUE; int endChr = Integer.MIN_VALUE; private static ArgsEngine myArgsEngine = null; int minTaxaWithLocus; private double errorRate = 0.01; private boolean includeReference = false; private String refGenomeFileStr = null; private long[] refGenomeChr = null; private boolean fuzzyStartPositions = false; int locusBorder = 0; final static int CHR = 0, STRAND = 1, START_POS = 2; // indices of these position attributes in array returned by theTOPM.getPositionArray(a) private boolean customSNPLogging = true; // a custom SNP log that collects useful info for filtering SNPs through machine learning criteria private CustomSNPLog myCustomSNPLog = null; private boolean customFiltering = false; // variables for calculating OS and PL for VCF, might not be in the correct class private static double error; private static double v1; private static double v2; private static double v3; private static HashMap<String, int[]> myGenoScoreMap; public TagsToSNPByAlignmentPlugin() { super(null, false); } public TagsToSNPByAlignmentPlugin(Frame parentFrame) { super(parentFrame, false); } @Override public DataSet performFunction(DataSet input) { myLogger.info("Finding SNPs in " + inputFile.getAbsolutePath() + "."); myLogger.info(String.format("StartChr:%d EndChr:%d %n", startChr, endChr)); theTOPM.sortTable(true); myLogger.info("\nAs a check, here are the first 5 tags in the TOPM (sorted by position):"); theTOPM.printRows(5, true, true); for (int chr = startChr; chr <= endChr; chr++) { myLogger.info("\n\nProcessing chromosome " + chr + "..."); String out = suppliedOutputFileName.replace("+", "" + chr); if (customSNPLogging) myCustomSNPLog = new CustomSNPLog(out, false); myLogger.info("Creating Mutable Alignment to hold genotypes for chr" + chr + " (maximum number of sites = " + maxSize + ")"); MutableNucleotideAlignment theMSA = vcf ? 
createMutableVCFAlignment(theTBT, maxSize + 100, includeReference) : createMutableAlignment(theTBT, maxSize + 100, includeReference); if (includeReference) { refGenomeChr = readReferenceGenomeChr(refGenomeFileStr, chr); if (refGenomeChr == null) continue; } runTagsToSNPByAlignment(theMSA, out, chr, false); if (customSNPLogging) myCustomSNPLog.close(); myLogger.info("Finished processing chromosome " + chr + "\n\n"); } if (this.isUpdateTOPM) { if (outTOPMFile.endsWith(".txt")) { theTOPM.writeTextFile(new File(outTOPMFile)); } else { theTOPM.writeBinaryFile(new File(outTOPMFile)); } } ConcurrencyTools.shutdown(); return null; } private void printUsage() { myLogger.info( "\n\n\nThe available options for the TagsToSNPByAlignmentPlugin are as follows:\n" + "-i Input .tbt file\n" + "-y Use byte-formatted TBT file (*.tbt.byte)\n" + "-m TagsOnPhysicalMap file containing genomic positions of tags\n" + "-mUpd Update TagsOnPhysicalMap file with allele calls for Production Pipeline, save to specified file (default: no updating)\n" + "-o Output HapMap file. Use a plus sign (+) as a wild card character in place of the chromosome number\n" + " (e.g., /path/hapmap/myGBSGenos.chr+.hmp.txt)\n" + "-vcf Output a VCF file (*.vcf) as well as the default HapMap (*.hmp.txt) (default: "+vcf+")\n" + "-mxSites Maximum number of sites (SNPs) output per chromosome (default: " + maxSize + ")\n" + "-mnF Minimum F (inbreeding coefficient) (default: " + minF + " = no filter)\n" + "-p Pedigree file containing full sample names (or expected names after merging) & expected inbreeding\n" + " coefficient (F) for each. Only taxa with expected F >= mnF used to calculate F = 1-Ho/He.\n" + " (default: use ALL taxa to calculate F)\n" + "-mnMAF Minimum minor allele frequency (default: " + minMAF + ")\n" + "-mnMAC Minimum minor allele count (default: " + minMAC + ")\n" + "-mnLCov Minimum locus coverage (proportion of Taxa with a genotype) (default: " + defaultMinPropTaxaWithLocus + ")\n" + "-errRate Average sequencing error rate per base (used to decide between heterozygous and homozygous calls) (default: "+errorRate+")\n" + "-ref Path to reference genome in fasta format. Ensures that a tag from the reference genome is always included\n" + " when the tags at a locus are aligned against each other to call SNPs. 
The reference allele for each site\n" + " is then provided in the output HapMap files, under the taxon name \"REFERENCE_GENOME\" (first taxon).\n" + " DEFAULT: Don't use reference genome.\n" // + "-LocusBorder All tags on either strand with start postions that differ by less than the specified\n" // + " integer (LocusBorder) are aligned to the reference genome to call SNPs at a locus.\n" // + " By default (without the -LocusBorder option), only tags with identical start postions and\n" // + " strand are grouped as a locus.\n" // + " Use of the -LocusBorder option requires that the -ref option is also invoked.\n" + "-inclRare Include the rare alleles at site (3 or 4th states) (default: " + inclRare + ")\n" + "-inclGaps Include sites where major or minor allele is a GAP (default: " + inclGaps + ")\n" + "-callBiSNPsWGap Include sites where the third allele is a GAP (default: " + callBiallelicSNPsWithGap + ") (mutually exclusive with inclGaps)\n" + "-sC Start chromosome\n" + "-eC End chromosome\n\n\n"); } @Override public void setParameters(String[] args) { myLogger.addAppender(new ConsoleAppender(new SimpleLayout())); if (args.length == 0) { printUsage(); throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n"); } if (myArgsEngine == null) { myArgsEngine = new ArgsEngine(); myArgsEngine.add("-i", "--input-file", true); myArgsEngine.add("-y", "--useTBTByte", false); myArgsEngine.add("-m", "--physical-map", true); myArgsEngine.add("-mUpd", "--update-physical-map", true); myArgsEngine.add("-o", "--output-directory", true); myArgsEngine.add("-vcf", "--output_vcf", false); myArgsEngine.add("-mxSites", "--max-sites-per-chr", true); myArgsEngine.add("-mnF", "--minFInbreeding", true); myArgsEngine.add("-p", "--pedigree-file", true); myArgsEngine.add("-mnMAF", "--minMinorAlleleFreq", true); myArgsEngine.add("-mnMAC", "--minMinorAlleleCount", true); myArgsEngine.add("-mnLCov", "--minLocusCov", true); myArgsEngine.add("-errRate", "--seqErrRate", true); myArgsEngine.add("-ref", "--referenceGenome", true); // myArgsEngine.add("-LocusBorder", "--locus-border", true); myArgsEngine.add("-inclRare", "--includeRare", false); myArgsEngine.add("-inclGaps", "--includeGaps", false); myArgsEngine.add("-callBiSNPsWGap", "--callBiSNPsWGap", false); myArgsEngine.add("-sC", "--start-chromosome", true); myArgsEngine.add("-eC", "--end-chromosome", true); } myArgsEngine.parse(args); if (myArgsEngine.getBoolean("-y")) { useTBTByte = true; } if (myArgsEngine.getBoolean("-i")) { String inputFileName = myArgsEngine.getString("-i"); inputFile = new File(inputFileName); if (!inputFile.exists() || !inputFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the TagsByTaxa input file (-i option: " + myArgsEngine.getString("-i") + ")."); } if (inputFileName.endsWith(".hdf") || inputFileName.endsWith(".h5")) { theTBT = new TagsByTaxaByteHDF5TagGroups(inputFileName); } else if (useTBTByte) { theTBT = new TagsByTaxaByteFileMap(inputFileName); } else { theTBT = new TagsByTaxaBitFileMap(inputFileName); } } else { printUsage(); throw new IllegalArgumentException("Please specify a TagsByTaxa input file (-i option)."); } if (myArgsEngine.getBoolean("-m")) { inTOPMFile = myArgsEngine.getString("-m"); File inTOPMFileTest = new File(inTOPMFile); if (!inTOPMFileTest.exists() || !inTOPMFileTest.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the TOPM input file (-m option: " + inTOPMFile + ")."); } inTOPMFileTest = null; boolean loadBinary = 
(inTOPMFile.endsWith(".txt")) ? false : true; theTOPM = new TagsOnPhysicalMap(inTOPMFile, loadBinary); } else { printUsage(); throw new IllegalArgumentException("Please specify a physical map file."); } if (myArgsEngine.getBoolean("-mUpd")) { this.isUpdateTOPM = true; this.outTOPMFile = myArgsEngine.getString("-mUpd"); } if (myArgsEngine.getBoolean("-o")) { suppliedOutputFileName = myArgsEngine.getString("-o"); boolean noWildCard = false; if (suppliedOutputFileName.contains(File.separator)) { if (!suppliedOutputFileName.substring(suppliedOutputFileName.lastIndexOf(File.separator)).contains("+")) { noWildCard = true; } } else if (!suppliedOutputFileName.contains("+")) { noWildCard = true; } if (noWildCard) { printUsage(); throw new IllegalArgumentException("The output file name should contain a \"+\" wildcard character in place of the chromosome number (-o option: " + suppliedOutputFileName + ")"); } String outFolder = suppliedOutputFileName.substring(0,suppliedOutputFileName.lastIndexOf(File.separator)); File outDir = new File(outFolder); try { if (!outDir.getCanonicalFile().isDirectory()) { throw new Exception(); } } catch (Exception e) { printUsage(); throw new IllegalArgumentException("Path to the output file does not exist (-o option: " + suppliedOutputFileName + ")"); } } if (myArgsEngine.getBoolean("-vcf")) { vcf = true; initVCFScoreMap(); } if (myArgsEngine.getBoolean("-mxSites")) { maxSize = Integer.parseInt(myArgsEngine.getString("-mxSites")); } if (myArgsEngine.getBoolean("-mnF")) { minF = Double.parseDouble(myArgsEngine.getString("-mnF")); } if (myArgsEngine.getBoolean("-p")) { String pedigreeFileStr = myArgsEngine.getString("-p"); File pedigreeFile = new File(pedigreeFileStr); if (!pedigreeFile.exists() || !pedigreeFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the pedigree input file (-p option: " + pedigreeFileStr + ")."); } taxaFs = readTaxaFsFromFile(pedigreeFile); if (taxaFs == null) { throw new IllegalArgumentException("Problem reading the pedigree file. Progam aborted."); } if (!maskNonInbredTaxa()) { throw new IllegalArgumentException("Mismatch between taxa names in the pedigree file and TBT. 
Progam aborted."); } usePedigree = true; } if (myArgsEngine.getBoolean("-mnMAF")) { minMAF = Double.parseDouble(myArgsEngine.getString("-mnMAF")); } if (myArgsEngine.getBoolean("-mnMAC")) { minMAC = Integer.parseInt(myArgsEngine.getString("-mnMAC")); } minTaxaWithLocus = (int) Math.round(theTBT.getTaxaCount() * defaultMinPropTaxaWithLocus); if (myArgsEngine.getBoolean("-mnLCov")) { double minPropTaxaWithLocus = Double.parseDouble(myArgsEngine.getString("-mnLCov")); minTaxaWithLocus = (int) Math.round(theTBT.getTaxaCount() * minPropTaxaWithLocus); } if (myArgsEngine.getBoolean("-errRate")) { errorRate = Double.parseDouble(myArgsEngine.getString("-errRate")); } if (myArgsEngine.getBoolean("-ref")) { refGenomeFileStr = myArgsEngine.getString("-ref"); File refGenomeFile = new File(refGenomeFileStr); if (!refGenomeFile.exists() || !refGenomeFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the reference genome fasta file (-ref option: " + refGenomeFileStr + ")."); } includeReference = true; refGenomeFile = null; System.gc(); } // the (experimental) -LocusBorder option is not properly implemented yet in Tassel4 // if (myArgsEngine.getBoolean("-LocusBorder")) { // if (!includeReference) { // printUsage(); // throw new IllegalArgumentException("The -LocusBorder option requires that the -ref option (referenceGenome) is also invoked."); // } // if (vcf) { // printUsage(); // throw new IllegalArgumentException("The -LocusBorder option is currently incompatible with the -vcf option."); // } // locusBorder = Integer.parseInt(myArgsEngine.getString("-LocusBorder")); // fuzzyStartPositions = true; // } if (myArgsEngine.getBoolean("-inclRare")) { inclRare = true; } if (myArgsEngine.getBoolean("-inclGaps")) { inclGaps = true; } if (myArgsEngine.getBoolean("-callBiSNPsWGap")) { if (inclGaps) { printUsage(); throw new IllegalArgumentException("The callBiSNPsWGap option is mutually exclusive with the inclGaps option."); } else { callBiallelicSNPsWithGap = true; } } if (myArgsEngine.getBoolean("-sC")) { startChr = Integer.parseInt(myArgsEngine.getString("-sC")); } else { printUsage(); throw new IllegalArgumentException("Please specify start and end chromosome numbers."); } if (myArgsEngine.getBoolean("-eC")) { endChr = Integer.parseInt(myArgsEngine.getString("-eC")); } else { printUsage(); throw new IllegalArgumentException("Please specify start and end chromosome numbers."); } if (endChr - startChr < 0) { printUsage(); throw new IllegalArgumentException("Error: The start chromosome is larger than the end chromosome."); } myLogger.info(String.format("minTaxaWithLocus:%d MinF:%g MinMAF:%g MinMAC:%d %n", minTaxaWithLocus, minF, minMAF, minMAC)); myLogger.info(String.format("includeRare:%s includeGaps:%s %n", inclRare, inclGaps)); } @Override public ImageIcon getIcon() { throw new UnsupportedOperationException("Not supported yet."); } @Override public String getButtonName() { throw new UnsupportedOperationException("Not supported yet."); } @Override public String getToolTipText() { throw new UnsupportedOperationException("Not supported yet."); } public void runTagsToSNPByAlignment(MutableNucleotideAlignment theMSA, String outHapMap, int targetChromo, boolean requireGeneticSupport) { long time = System.currentTimeMillis(); DataOutputStream locusLogDOS = openLocusLog(outHapMap); TagsAtLocus currTAL = new TagsAtLocus(Integer.MIN_VALUE,Byte.MIN_VALUE,Integer.MIN_VALUE,Integer.MIN_VALUE,includeReference,fuzzyStartPositions,errorRate); int[] currPos = null; int countLoci = 0; for (int i = 0; 
(i < theTOPM.getSize()) && (theMSA.getSiteCount() < (maxSize - 1000)); i++) { int ri = theTOPM.getReadIndexForPositionIndex(i); // process tags in order of physical position int[] newPos = theTOPM.getPositionArray(ri); if (newPos[CHR] != targetChromo) continue; //Skip tags from other chromosomes if (requireGeneticSupport && (theTOPM.getMapP(ri) < 2)) continue; //Skip tags with low mapP scores if ((fuzzyStartPositions && nearbyTag(newPos, currPos)) || Arrays.equals(newPos, currPos)) { currTAL.addTag(ri, theTOPM, theTBT, includeReference, fuzzyStartPositions); } else { int nTaxaCovered = currTAL.getNumberTaxaCovered(); if (currTAL.getSize()>1 && nTaxaCovered >= minTaxaWithLocus) { // finish the current TAL addSitesToMutableAlignment(currTAL, theMSA,locusLogDOS); // note that with fuzzyStartPositions there may be no overlapping tags!! countLoci++; if (theMSA.getSiteCount() % 100 == 0) { double rate = (double) theMSA.getSiteCount() / (double) (System.currentTimeMillis() - time); myLogger.info(String.format( "Chr:%d Pos:%d Loci=%d SNPs=%d rate=%g SNP/millisec %n", currPos[CHR], currPos[START_POS], countLoci, theMSA.getSiteCount(), rate)); } } else if (currPos!=null) { logRejectedTagLocus(currTAL,locusLogDOS); } currPos = newPos; // start a new TAL with the current tag if ((currPos[STRAND] != TagsOnPhysicalMap.byteMissing) && (currPos[START_POS] != TagsOnPhysicalMap.intMissing)) { // we already know that currPos[CHR]==targetChromo currTAL = new TagsAtLocus(currPos[CHR],(byte) currPos[STRAND],currPos[START_POS],theTOPM.getTagLength(ri),includeReference,fuzzyStartPositions,errorRate); currTAL.addTag(ri, theTOPM, theTBT, includeReference, fuzzyStartPositions); } else { currPos = null; // invalid position } } } if ((currTAL.getSize() > 1) && (currTAL.getNumberTaxaCovered() >= minTaxaWithLocus)) { // then finish the final TAL for the targetChromo addSitesToMutableAlignment(currTAL, theMSA,locusLogDOS); } else if (currPos!=null) { logRejectedTagLocus(currTAL,locusLogDOS); } if (theMSA.getSiteCount() > 0) { theMSA.clean(); ExportUtils.writeToHapmap(theMSA, false, outHapMap, '\t', null); if (vcf) { String vcfFileName; if (outHapMap.endsWith(".hmp.txt")) { vcfFileName = outHapMap.replace(".hmp.txt", ".vcf"); } else if (outHapMap.endsWith(".hmp.txt.gz")) { vcfFileName = outHapMap.replace(".hmp.txt.gz", ".vcf.gz"); } else { vcfFileName = outHapMap + ".vcf"; } ExportUtils.writeToVCF(theMSA, vcfFileName, '\t'); } } myLogger.info("Number of marker sites recorded for chr" + targetChromo + ": " + theMSA.getSiteCount()); try{ locusLogDOS.close(); } catch(Exception e) { catchLocusLogException(e); } } /** * Creates a MutableNucleotideAlignment based on the taxa in a TBT. */ private static MutableNucleotideAlignment createMutableAlignment(TagsByTaxa theTBT, int maxSites, boolean includeReference) { String[] taxaNames; if (includeReference) { int nTaxa = theTBT.getTaxaNames().length + 1; taxaNames = new String[nTaxa]; taxaNames[0] = "REFERENCE_GENOME"; // will hold the "genotype" of the reference genome for (int t = 1; t < nTaxa; t++) { taxaNames[t] = theTBT.getTaxaName(t-1); } } else { taxaNames = theTBT.getTaxaNames(); } IdGroup taxa = new SimpleIdGroup(taxaNames); MutableNucleotideAlignment theMSA = MutableNucleotideAlignment.getInstance(taxa, 0, taxa.getIdCount(), maxSites); return theMSA; } /** * Same as above method. 
Creates a MutableVCFAlignment */ private static MutableVCFAlignment createMutableVCFAlignment(TagsByTaxa theTBT, int maxSites, boolean includeReference) { String[] taxaNames; if (includeReference) { int nTaxa = theTBT.getTaxaNames().length + 1; taxaNames = new String[nTaxa]; taxaNames[0] = "REFERENCE_GENOME"; // will hold the "genotype" of the reference genome for (int t = 1; t < nTaxa; t++) { taxaNames[t] = theTBT.getTaxaName(t-1); } } else { taxaNames = theTBT.getTaxaNames(); } IdGroup taxa = new SimpleIdGroup(taxaNames); MutableVCFAlignment theMVA = MutableVCFAlignment.getInstance(taxa, 0, taxa.getIdCount(), maxSites); return theMVA; } boolean nearbyTag(int[] newTagPos, int[] currTagPos) { if (newTagPos == null || currTagPos == null) { return false; } // because we move through the TOPM in positional order, the newTag startPosition is guaranteed to be >= that of the current tag if (newTagPos[CHR] == currTagPos[CHR] && newTagPos[START_POS] - currTagPos[START_POS] < locusBorder) { // &&newTagPos[STRAND]==currTagPos[STRAND] // grab all of the tags that align to a local region (until a gap > tolerance is reached) currTagPos[START_POS] = newTagPos[START_POS]; return true; } return false; } private synchronized void addSitesToMutableAlignment(TagsAtLocus theTAL, MutableNucleotideAlignment theMSA, DataOutputStream locusLogDOS) { boolean refTagUsed = false; byte[][][] alleleDepths = null; byte[][] commonAlleles = null; if (theTAL.getSize() < 2) { logRejectedTagLocus(theTAL,locusLogDOS); return; // need at least two (overlapping!) sequences to make an alignment } byte[][] callsBySite; if (vcf) { if (includeReference) { addRefTag(theTAL); refTagUsed = true; } callsBySite = theTAL.getSNPCallsVCF(callBiallelicSNPsWithGap, myGenoScoreMap, includeReference); alleleDepths = theTAL.getAlleleDepthsInTaxa(); commonAlleles = theTAL.getCommonAlleles(); } else if (includeReference) { if (fuzzyStartPositions) { String refSeqInRegion = getRefSeqInRegion(theTAL); callsBySite = theTAL.getSNPCallsQuant(refSeqInRegion, callBiallelicSNPsWithGap); } else { addRefTag(theTAL); refTagUsed = true; callsBySite = theTAL.getSNPCallsQuant(callBiallelicSNPsWithGap, includeReference); } } else { callsBySite = theTAL.getSNPCallsQuant(callBiallelicSNPsWithGap, includeReference); } if (callsBySite == null) { logAcceptedTagLocus(theTAL.getLocusReport(minTaxaWithLocus, null), locusLogDOS); return; } int[] positionsInLocus = theTAL.getPositionsOfVariableSites(); int strand = theTAL.getStrand(); boolean[] varSiteKept = new boolean[callsBySite.length]; // initializes to false TagLocusSiteQualityScores SiteQualityScores = new TagLocusSiteQualityScores(callsBySite.length); for (int s = 0; s < callsBySite.length; s++) { byte[] alleles = null; if ((alleles = isSiteGood(callsBySite[s])) == null) { // NOTE: only the maj & min1 alleles are returned, so the Prod Pipeline can only call 2 alleles continue; } if (includeReference && !fuzzyStartPositions && theTAL.getRefGeno(s) == NucleotideAlignmentConstants.GAP_DIPLOID_ALLELE) { continue; } int position = (strand == -1) ? 
theTAL.getMinStartPosition() - positionsInLocus[s] : theTAL.getMinStartPosition() + positionsInLocus[s]; CustomSNPLogRecord mySNPLogRecord; if (customSNPLogging) { mySNPLogRecord = new CustomSNPLogRecord(s, theTAL, position, useTaxaForMinF, refTagUsed); myCustomSNPLog.writeEntry(mySNPLogRecord.toString()); SiteQualityScores.addSite(s, mySNPLogRecord.getInbredCoverage(), mySNPLogRecord.getInbredHetScore(), alleles, position); if (customFiltering && !mySNPLogRecord.isGoodSNP()) { continue; } } varSiteKept[s] = true; int currSite = theMSA.getSiteCount(); theMSA.addSite(currSite); String chromosome = String.valueOf(theTAL.getChromosome()); theMSA.setLocusOfSite(currSite, new Locus(chromosome, chromosome, -1, -1, null, null)); theMSA.setPositionOfSite(currSite, position); int offset = 0; if (includeReference && !fuzzyStartPositions) { offset = 1; byte geno = (strand == -1) ? complementGeno(theTAL.getRefGeno(s)) : theTAL.getRefGeno(s); theMSA.setBase(0, currSite, geno); theMSA.setReferenceAllele(currSite, geno); if (vcf) { byte[] depths = new byte[]{0,0,0}; // assumes maxNumAlleles = 3 theMSA.setDepthForAlleles(0, currSite, depths); } } for (int tx = 0; tx < theTBT.getTaxaCount(); tx++) { if (callsBySite[s][tx] != Alignment.UNKNOWN_DIPLOID_ALLELE && strand == -1) { theMSA.setBase(tx+offset, currSite, complementGeno(callsBySite[s][tx])); // complement to plus strand } else { theMSA.setBase(tx+offset, currSite, callsBySite[s][tx]); } if (vcf) { byte[] depths = new byte[alleleDepths.length]; for (int a = 0; a < depths.length; a++) { depths[a] = alleleDepths[a][s][tx]; } theMSA.setDepthForAlleles(tx+offset, currSite, depths); } } if (vcf) { byte[] allelesForSite = new byte[commonAlleles.length]; for (int a = 0; a < allelesForSite.length; a++) { if (strand == -1) allelesForSite[a] = complementAllele(commonAlleles[a][s]); else allelesForSite[a] = commonAlleles[a][s]; } theMSA.setCommonAlleles(currSite, allelesForSite); } if (isUpdateTOPM & !customFiltering) { updateTOPM(theTAL, s, position, strand, alleles); } if (currSite % 100 == 0) { System.out.printf("Site:%d Position:%d %n", currSite, position); } } logAcceptedTagLocus(theTAL.getLocusReport(minTaxaWithLocus, varSiteKept), locusLogDOS); if (isUpdateTOPM & customFiltering) { updateTOPM(theTAL, varSiteKept, SiteQualityScores); } } private void updateTOPM(TagsAtLocus myTAL, boolean[] varSiteKept, TagLocusSiteQualityScores SiteQualityScores) { SiteQualityScores.sortByQuality(); byte strand = myTAL.getStrand(); for (int s = 0; s < SiteQualityScores.getSize(); s++) { int siteInTAL = SiteQualityScores.getSiteInTAL(s); if (varSiteKept[siteInTAL]) { for (int tg = 0; tg < myTAL.getSize(); tg++) { int topmTagIndex = myTAL.getTOPMIndexOfTag(tg); if (topmTagIndex == Integer.MIN_VALUE) continue; // skip the reference genome tag (which may not be in the TOPM) byte baseToAdd = myTAL.getCallAtVariableSiteForTag(siteInTAL, tg); boolean matched = false; for (byte cb : SiteQualityScores.getAlleles(s)) { if (baseToAdd == cb) { matched = true; break; } } // so that all tags in the tagAlignment have the same corresponding variants in the TOPM, add a variant no matter what (set to missing if needed) byte offset = (byte) (SiteQualityScores.getPosition(s) - myTAL.getMinStartPosition()); if (!matched) { baseToAdd = Alignment.UNKNOWN_DIPLOID_ALLELE; } if (strand == -1) { baseToAdd = complementAllele(baseToAdd); // record everything relative to the plus strand } // convert from allele from 0-15 style to IUPAC ASCII character value (e.g., (byte) 'A') baseToAdd = 
getIUPACAllele(baseToAdd); theTOPM.addVariant(topmTagIndex, offset, baseToAdd); } } } } private void updateTOPM(TagsAtLocus myTAL, int variableSite, int position, int strand, byte[] alleles) { for (int tg = 0; tg < myTAL.getSize(); tg++) { byte baseToAdd = myTAL.getCallAtVariableSiteForTag(variableSite, tg); // NOTE: // -"alleles" contains only the maj & min1 alleles, so the Prod Pipeline can only call 2 alleles at a site // -"alleles" are coded as (byte) 0 to 15 (tassel4 encoding). So is baseToAdd, so they are matching // -this means that a production TOPM from Tassel3 cannot be used in Tassel4 boolean matched = false; for (byte cb : alleles) { if (baseToAdd == cb) { matched = true; break; } } // so that all tags in the tagAlignment have the same corresponding variants in the TOPM, add a variant no matter what (set to missing if needed) int topmTagIndex = myTAL.getTOPMIndexOfTag(tg); byte offset = (byte) (position - myTAL.getMinStartPosition()); if (!matched) { baseToAdd = Alignment.UNKNOWN_DIPLOID_ALLELE; } if (strand == -1) { baseToAdd = complementAllele(baseToAdd); // record everything relative to the plus strand } theTOPM.addVariant(topmTagIndex, offset, baseToAdd); } } /** * * @param calls * @return */ private byte[] isSiteGood(byte[] calls) { int[][] alleles = AlignmentUtils.getAllelesSortedByFrequency(calls); if (alleles[0].length < 2) { return null; // quantitative SNP calling rendered the site invariant } int aCnt = alleles[1][0] + alleles[1][1]; double theMAF = (double) alleles[1][1] / (double) aCnt; if ((theMAF < minMAF) && (alleles[1][1] < minMAC)) return null; // note that a site only needs to pass one of the criteria, minMAF &/or minMAC byte majAllele = (byte) alleles[0][0]; byte minAllele = (byte) alleles[0][1]; if (!inclGaps && ((majAllele == NucleotideAlignmentConstants.GAP_ALLELE) || (minAllele == NucleotideAlignmentConstants.GAP_ALLELE))) { return null; } byte homMaj = (byte) ((majAllele << 4) | majAllele); byte homMin = (byte) ((minAllele << 4) | minAllele); byte hetG1 = AlignmentUtils.getDiploidValue(majAllele, minAllele); byte hetG2 = AlignmentUtils.getDiploidValue(minAllele, majAllele); if (minF > -1.0) { // only test for minF if the parameter has been set above the theoretical minimum double obsF = calculateF(calls, alleles, hetG1, hetG2, theMAF); if (obsF < minF) return null; } return getGoodAlleles(calls,alleles,homMaj,homMin,hetG1,hetG2,majAllele,minAllele); } private double calculateF(byte[] calls, int[][] alleles, byte hetG1, byte hetG2, double theMAF) { boolean report = false; double obsF; int hetGCnt = 0; if (usePedigree) { byte[] callsToUse = filterCallsForInbreds(calls); //int[][] allelesToUse = getSortedAlleleCounts(callsToUse); int[][] allelesToUse = AlignmentUtils.getAllelesSortedByFrequency(callsToUse); if (allelesToUse[0].length < 2) { return 1.0; // lack of variation in the known inbreds will NOT reject a SNP } int aCnt = allelesToUse[1][0] + allelesToUse[1][1]; double newMAF = (double) allelesToUse[1][1] / (double) aCnt; if (newMAF <= 0.0) { return 1.0; // lack of variation in the known inbreds will NOT reject a SNP } byte majAllele = (byte) allelesToUse[0][0]; byte minAllele = (byte) allelesToUse[0][1]; //byte newHetG = IUPACNucleotides.getDegerateSNPByteFromTwoSNPs(majAllele, minAllele); byte newHetG1 = AlignmentUtils.getDiploidValue(majAllele, minAllele); byte newHetG2 = AlignmentUtils.getDiploidValue(minAllele, majAllele); for (byte i : callsToUse) { if (i == newHetG1 || i == newHetG2) { hetGCnt++; } } int majGCnt = (allelesToUse[1][0] - 
hetGCnt) / 2; // number of homozygous major allele genotypes int minGCnt = (allelesToUse[1][1] - hetGCnt) / 2; // number of homozygous minor allele genotypes double propHets = (double) hetGCnt / (double) (hetGCnt + majGCnt + minGCnt); double expHets = 2.0 * newMAF * (1 - newMAF); obsF = 1.0 - (propHets / expHets); if (report) { System.out.printf("%d %d %d propHets:%g expHets:%g obsF:%g %n", majGCnt, minGCnt, hetGCnt, propHets, expHets, obsF); } return obsF; } else { for (byte i : calls) { if (i == hetG1 || i == hetG2) { hetGCnt++; } } int majGCnt = (alleles[1][0] - hetGCnt) / 2; // number of homozygous major allele genotypes int minGCnt = (alleles[1][1] - hetGCnt) / 2; // number of homozygous minor allele genotypes double propHets = (double) hetGCnt / (double) (hetGCnt + majGCnt + minGCnt); double expHets = 2.0 * theMAF * (1 - theMAF); obsF = 1.0 - (propHets / expHets); if (report) { System.out.printf("%d %d %d propHets:%g expHets:%g obsF:%g %n", majGCnt, minGCnt, hetGCnt, propHets, expHets, obsF); } return obsF; } } private byte[] getGoodAlleles(byte[] calls,int[][] alleles,byte homMaj,byte homMin,byte hetG1,byte hetG2,byte majAllele,byte minAllele) { if (inclRare) { byte[] byteAlleles = new byte[alleles[0].length]; for (int a = 0; a < alleles[0].length; a++) { byteAlleles[a] = (byte) alleles[0][a]; } return byteAlleles; } else { setBadCallsToMissing(calls,homMaj,homMin,hetG1,hetG2,majAllele,minAllele); alleles = AlignmentUtils.getAllelesSortedByFrequency(calls); // the allele frequency & number of alleles may have been altered by setBadCallsToMissing() if (alleles[0].length < 2) { return null; // setBadCallsToMissing() rendered the site invariant } else if (alleles[0].length == 2) { return getMajMinAllelesOnly(alleles); } else { if (callBiallelicSNPsWithGap) { boolean hasGap = false; for (int a = 2; a < alleles[0].length; a++) { // NOTE: the maj & min alleles are not checked (we know that they are NOT gap (inclGaps mutually exclusive with callBiallelicSNPsWithGap) if (((byte) alleles[0][a]) == NucleotideAlignmentConstants.GAP_ALLELE) { hasGap = true; break; } } if (hasGap) { byte[] byteAlleles = new byte[3]; byteAlleles[0] = (byte) alleles[0][0]; byteAlleles[1] = (byte) alleles[0][1]; byteAlleles[2] = NucleotideAlignmentConstants.GAP_ALLELE; return byteAlleles; } else { return getMajMinAllelesOnly(alleles); } } else { return getMajMinAllelesOnly(alleles); } } } } private byte[] getMajMinAllelesOnly(int[][] alleles) { byte[] byteAlleles = new byte[2]; byteAlleles[0] = (byte) alleles[0][0]; byteAlleles[1] = (byte) alleles[0][1]; return byteAlleles; } private void setBadCallsToMissing(byte[] calls, byte homMaj, byte homMin, byte hetG1, byte hetG2, byte majAllele, byte minAllele) { if (callBiallelicSNPsWithGap) { for (int i = 0; i < calls.length; i++) { if (isGoodBiallelicWithGapCall(calls[i],homMaj,homMin,hetG1,hetG2,majAllele,minAllele)) { continue; } else { calls[i] = Alignment.UNKNOWN_DIPLOID_ALLELE; } } } else { for (int i = 0; i < calls.length; i++) { if ((calls[i] == homMaj) || (calls[i] == homMin) || (calls[i] == hetG1) || (calls[i] == hetG2)) { continue; } else { calls[i] = Alignment.UNKNOWN_DIPLOID_ALLELE; } } } } private boolean isGoodBiallelicWithGapCall(byte call, byte homMaj, byte homMin, byte hetG1, byte hetG2, byte majAllele, byte minAllele) { if (call == homMaj) return true; if (call == homMin) return true; if (call == hetG1) return true; if (call == hetG2) return true; if (call == AlignmentUtils.getDiploidValue(majAllele,NucleotideAlignmentConstants.GAP_ALLELE)) return 
true; if (call == AlignmentUtils.getDiploidValue(NucleotideAlignmentConstants.GAP_ALLELE,majAllele)) return true; if (call == AlignmentUtils.getDiploidValue(minAllele,NucleotideAlignmentConstants.GAP_ALLELE)) return true; if (call == AlignmentUtils.getDiploidValue(NucleotideAlignmentConstants.GAP_ALLELE,minAllele)) return true; if (call == NucleotideAlignmentConstants.GAP_DIPLOID_ALLELE) return true; return false; } private DataOutputStream openLocusLog(String outHapMap) { String logFileName; if (outHapMap.endsWith(".hmp.txt")) { logFileName = outHapMap.replace(".hmp.txt", ".LocusLog.txt"); } else if (outHapMap.endsWith(".hmp.txt.gz")) { logFileName = outHapMap.replace(".hmp.txt.gz", ".LocusLog.txt"); } else { logFileName = outHapMap + ".LocusLog.txt"; } try { DataOutputStream locusLogDOS = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(new File(logFileName)), 65536)); locusLogDOS.writeBytes( "chr\tstart\tend\tstrand\ttotalbp\tnTags\tnReads\tnTaxaCovered\tminTaxaCovered\tstatus\tnVariableSites\tposVariableSites\tnVarSitesKept\tposVarSitesKept\trefTag?\tmaxTagLen\tminTagLen\n"); return locusLogDOS; } catch (Exception e) { catchLocusLogException(e); } return null; } private void logRejectedTagLocus(TagsAtLocus currTAL, DataOutputStream locusLogDOS) { int start, end; if (currTAL.getStrand() == -1) { end = currTAL.getMinStartPosition(); start = currTAL.getMinStartPosition()-currTAL.getMaxTagLength()+1; } else { start = currTAL.getMinStartPosition(); end = currTAL.getMinStartPosition()+currTAL.getMaxTagLength()-1; } int totalbp = end-start+1; String status, refTag; if (currTAL.getSize() == 1) { status = "invariant\t0"; refTag = currTAL.getDivergenceOfTag(0)==0 ? "1" : "0"; } else { status = "tooFewTaxa\tNA"; boolean refTagFound = false; int t = -1; while (!refTagFound && t < currTAL.getSize()-1) { t++; if (currTAL.getDivergenceOfTag(t)==0) { refTagFound=true; } } refTag = refTagFound ? 
"1" : "0"; } try { locusLogDOS.writeBytes( currTAL.getChromosome() +"\t"+ start +"\t"+ end +"\t"+ currTAL.getStrand() +"\t"+ totalbp +"\t"+ currTAL.getSize() +"\t"+ currTAL.getTotalNReads() +"\t"+ currTAL.getNumberTaxaCovered() +"\t"+ minTaxaWithLocus +"\t"+ status +"\t"+ "NA" +"\t"+ "0" +"\t"+ "NA" +"\t"+ refTag +"\t"+ currTAL.getMaxTagLength() +"\t"+ currTAL.getMinTagLength() +"\n" ); } catch (Exception e) { catchLocusLogException(e); } } private void logAcceptedTagLocus(String locusLogRecord, DataOutputStream locusLogDOS) { try { locusLogDOS.writeBytes(locusLogRecord); } catch (Exception e) { catchLocusLogException(e); } } private void catchLocusLogException(Exception e) { System.out.println("ERROR: Unable to write to locus log file: " + e); e.printStackTrace(); System.exit(1); } private byte[] filterCallsForInbreds(byte[] calls) { byte[] callsForInbredsOnly = new byte[nInbredTaxa]; int inbred = 0; for (int taxon = 0; taxon < calls.length; taxon++) { if (useTaxaForMinF[taxon]) { callsForInbredsOnly[inbred] = calls[taxon]; inbred++; } } return callsForInbredsOnly; } public static HashMap<String, Double> readTaxaFsFromFile(File pedigreeFile) { HashMap<String, Double> taxaFs = new HashMap<String, Double>(); String inputLine = "Nothing has been read from the pedigree input file yet"; int nameCol = -1, fCol = -1, nTaxa = 0; try { BufferedReader br = new BufferedReader(new FileReader(pedigreeFile), 65536); inputLine = br.readLine(); // header line String[] cells = inputLine.split("\t"); // headers for (int col = 0; col < cells.length; col++) { if (cells[col].equalsIgnoreCase("Name")) { nameCol = col; } if (cells[col].equalsIgnoreCase("F")) { fCol = col; } } if (nameCol > -1 && fCol > -1) { while ((inputLine = br.readLine()) != null) { cells = inputLine.split("\t"); if (cells[fCol].equals("NA")) { taxaFs.put(cells[nameCol], -2.0); } else { taxaFs.put(cells[nameCol], Double.parseDouble(cells[fCol])); } ++nTaxa; } } else { throw new Exception("Name and/or F column not found in header"); } } catch (Exception e) { myLogger.error("Catch in reading pedigree file e=" + e); e.printStackTrace(); System.out.println(inputLine); return null; } myLogger.info(nTaxa + " taxa read from the pedigree file"); return taxaFs; } private boolean maskNonInbredTaxa() { useTaxaForMinF = new boolean[theTBT.getTaxaCount()]; // initialized to false nInbredTaxa = 0; try { for (int taxon = 0; taxon < theTBT.getTaxaCount(); taxon++) { if (taxaFs.containsKey(theTBT.getTaxaName(taxon))) { if (taxaFs.get(theTBT.getTaxaName(taxon)) >= minF) { useTaxaForMinF[taxon] = true; nInbredTaxa++; } } else { throw new Exception("Taxon " + theTBT.getTaxaName(taxon) + " not found in the pedigree file"); } } myLogger.info(nInbredTaxa + " taxa with an Expected F >= the mnF of " + minF + " were found in the input TBT"); return true; } catch (Exception e) { myLogger.error("Mismatch between TBT and pedigree file e=" + e); e.printStackTrace(); return false; } } private long[] readReferenceGenomeChr(String inFileStr, int targetChr) { int nBases = getLengthOfReferenceGenomeChr(inFileStr, targetChr); if (nBases == 0) return null; int basesPerLong = BaseEncoder.chunkSize; int nLongs = (nBases % basesPerLong == 0) ? 
nBases / basesPerLong : (nBases / basesPerLong) + 1; long[] refGenomeChrAsLongs = new long[nLongs]; myLogger.info("\n\nReading in the target chromosome " + targetChr + " from the reference genome fasta file: " + inFileStr); String temp = "Nothing has been read yet from the reference genome fasta file"; try { BufferedReader br = new BufferedReader(new FileReader(new File(inFileStr))); StringBuilder currStrB = new StringBuilder(); int currChr = Integer.MIN_VALUE, chunk = 0; while (br.ready()) { temp = br.readLine().trim(); if (temp.startsWith(">")) { if (chunk > 0) { break; // finished reading the targetChr (no need to read the rest of the file) } String chrS = temp.replace(">", ""); chrS = chrS.replace("chr", ""); currChr = Integer.parseInt(chrS); // don't need to catch exception because getLengthOfReferenceGenomeChr() would have caught it already myLogger.info("Currently reading chromosome " + currChr + " (target chromosome = " + targetChr + ")"); } else if (currChr == targetChr) { currStrB.append(temp.replace("N", "A")); // BaseEncoder encodes sequences with N's as (long) -1 while (currStrB.length() >= basesPerLong) { refGenomeChrAsLongs[chunk] = BaseEncoder.getLongFromSeq(currStrB.substring(0, basesPerLong)); currStrB = (currStrB.length() > basesPerLong) ? new StringBuilder(currStrB.substring(basesPerLong)) : new StringBuilder(); chunk++; if (chunk % 1000000 == 0) { myLogger.info(chunk + " chunks of " + basesPerLong + " bases read from the reference genome fasta file for chromosome " + targetChr); } } } } if (currStrB.length() > 0) { refGenomeChrAsLongs[chunk] = BaseEncoder.getLongFromSeq(currStrB.toString()); chunk++; } myLogger.info("\n\nFinished reading target chromosome " + targetChr + " into a total of " + chunk + " " + basesPerLong + "bp chunks\n\n"); if (chunk != nLongs) { throw new Exception("The number of 32 base chunks read (" + chunk + ") was not equal to the expected number (" + nLongs + ")"); } br.close(); } catch (Exception e) { myLogger.error("Exception caught while reading the reference genome fasta file at line. Error=" + e); e.printStackTrace(); System.exit(1); } return refGenomeChrAsLongs; } private int getLengthOfReferenceGenomeChr(String inFileStr, int targetChr) { myLogger.info("\n\nDetermining the length (in bases) of target chromosome " + targetChr + " in the reference genome fasta file: " + inFileStr); String temp = "Nothing has been read yet from the reference genome fasta file"; int line = 0, nBases = 0; try { BufferedReader br = new BufferedReader(new FileReader(new File(inFileStr))); int currChr = Integer.MIN_VALUE; while (br.ready()) { temp = br.readLine().trim(); line++; if (line % 1000000 == 0) { myLogger.info(line + " lines read from the reference genome fasta file"); } if (temp.startsWith(">")) { if (nBases > 0) { break; // finished reading the targetChr (no need to read the rest of the file) } String chrS = temp.replace(">", ""); chrS = chrS.replace("chr", ""); try { currChr = Integer.parseInt(chrS); } catch (NumberFormatException e) { myLogger.error("\n\nTagsToSNPByAlignment detected a non-numeric chromosome name in the reference genome sequence fasta file: " + chrS + "\n\nPlease change the FASTA headers in your reference genome sequence to integers " + "(>1, >2, >3, etc.) 
OR to 'chr' followed by an integer (>chr1, >chr2, >chr3, etc.)\n\n"); System.exit(1); } myLogger.info("Currently reading chromosome " + currChr + " (target chromosome = " + targetChr + ")"); } else if (currChr == targetChr) { nBases += temp.length(); } } if (nBases == 0) { throw new Exception("Target chromosome ("+targetChr+") not found"); } myLogger.info("The target chromosome " + targetChr + " is " + nBases + " bases long"); br.close(); } catch (Exception e) { if (nBases == 0) { myLogger.warn("Exception caught while reading the reference genome fasta file at line " + line + "\n e=" + e +"\n Skipping this chromosome..."); } else { myLogger.error("Exception caught while reading the reference genome fasta file at line " + line + "\n e=" + e); e.printStackTrace(); System.exit(1); } } return nBases; } private String getRefSeqInRegion(TagsAtLocus theTAL) { int basesPerLong = BaseEncoder.chunkSize; int refSeqStartPosition = theTAL.getMinStartPosition() - 128; int startIndex = Math.max((refSeqStartPosition / basesPerLong) - 1, 0); int refSeqEndPosition = theTAL.getMaxStartPosition() + 128; int endIndex = Math.min((refSeqEndPosition / basesPerLong) + 1, refGenomeChr.length - 1); StringBuilder sb = new StringBuilder(); for (int i = startIndex; i <= endIndex; ++i) { sb.append(BaseEncoder.getSequenceFromLong(refGenomeChr[i])); } theTAL.setMinStartPosition(startIndex * basesPerLong + 1); return sb.toString(); } private void addRefTag(TagsAtLocus theTAL) { String refTag; int basesPerLong = BaseEncoder.chunkSize; int refSeqStartPos, refSeqEndPos; if (theTAL.getStrand() == -1) { refSeqEndPos = theTAL.getMinStartPosition(); refSeqStartPos = refSeqEndPos - theTAL.getMaxTagLength() + 1; } else { refSeqStartPos = theTAL.getMinStartPosition(); refSeqEndPos = refSeqStartPos + theTAL.getMaxTagLength() - 1; } int startIndex = Math.max((refSeqStartPos/basesPerLong)-1, 0); int endIndex = Math.min((refSeqEndPos/basesPerLong), refGenomeChr.length-1); StringBuilder sb = new StringBuilder(); for (int i = startIndex; i <= endIndex; ++i) { sb.append(BaseEncoder.getSequenceFromLong(refGenomeChr[i])); } refTag = sb.substring(Math.max(refSeqStartPos-startIndex*basesPerLong-1,0), Math.min(refSeqStartPos-startIndex*basesPerLong-1+theTAL.getMaxTagLength(),sb.length())); if (theTAL.getStrand() == -1) { refTag = revComplement(refTag); } theTAL.addRefTag(refTag, theTOPM.getTagSizeInLong(), theTOPM.getNullTag()); } public static byte complementGeno(byte geno) { byte comp = Byte.MIN_VALUE; switch (geno) { case 0x00: comp = 0x33; break; // AA -> TT case 0x01: comp = 0x32; break; // AC -> TG case 0x02: comp = 0x31; break; // AG -> TC case 0x03: comp = 0x30; break; // AT -> TA case 0x11: comp = 0x22; break; // CC -> GG case 0x10: comp = 0x23; break; // CA -> GT case 0x12: comp = 0x21; break; // CG -> GC case 0x13: comp = 0x20; break; // CT -> GA case 0x22: comp = 0x11; break; // GG -> CC case 0x20: comp = 0x13; break; // GA -> CT case 0x21: comp = 0x12; break; // GC -> CG case 0x23: comp = 0x10; break; // GT -> CA case 0x33: comp = 0x00; break; // TT -> AA case 0x30: comp = 0x03; break; // TA -> AT case 0x31: comp = 0x02; break; // TC -> AG case 0x32: comp = 0x01; break; // TG -> AC case 0x05: comp = 0x35; break; // A- -> T- case 0x50: comp = 0x53; break; // -A -> -T case 0x15: comp = 0x25; break; // C- -> G- case 0x51: comp = 0x52; break; // -C -> -G case 0x25: comp = 0x15; break; // G- -> C- case 0x52: comp = 0x51; break; // -G -> -C case 0x35: comp = 0x05; break; // T- -> A- case 0x53: comp = 0x50; break; // -T -> -A case 0x55: 
comp = 0x55; break; // -- -> -- case Alignment.UNKNOWN_DIPLOID_ALLELE: comp = Alignment.UNKNOWN_DIPLOID_ALLELE; break; default: comp = Alignment.UNKNOWN_DIPLOID_ALLELE; break; } return comp; } public static byte complementAllele(byte allele) { byte comp = Byte.MIN_VALUE; switch (allele) { case 0x00: comp=NucleotideAlignmentConstants.T_ALLELE; break; // A -> T case 0x01: comp=NucleotideAlignmentConstants.G_ALLELE; break; // C -> G case 0x02: comp=NucleotideAlignmentConstants.C_ALLELE; break; // G -> C case 0x03: comp=NucleotideAlignmentConstants.A_ALLELE; break; // T -> A case 0x05: comp=NucleotideAlignmentConstants.GAP_ALLELE; break; // - -> - default: comp = Alignment.UNKNOWN_ALLELE; break; } return comp; } public static byte getIUPACAllele(byte allele) { byte iupacAllele = (byte) 'N'; switch (allele) { case 0x00: iupacAllele = (byte) 'A'; break; case 0x01: iupacAllele = (byte) 'C'; break; case 0x02: iupacAllele = (byte) 'G'; break; case 0x03: iupacAllele = (byte) 'T'; break; case 0x05: iupacAllele = (byte) '-'; break; default: iupacAllele = (byte) 'N'; break; } return iupacAllele; } public static char complement(char geno) { char comp = 'X'; switch (geno) { case 'A': comp = 'T'; break; case 'C': comp = 'G'; break; case 'G': comp = 'C'; break; case 'T': comp = 'A'; break; case 'K': comp = 'M'; break; case 'M': comp = 'K'; break; case 'R': comp = 'Y'; break; case 'S': comp = 'S'; break; case 'W': comp = 'W'; break; case 'Y': comp = 'R'; break; case '-': comp = '-'; break; // both strands have the deletion case '+': comp = '+'; break; // both strands have the insertion case '0': comp = '0'; break; case 'N': comp = 'N'; break; default: comp = 'N'; break; } return comp; } public static String revComplement(String seq) { StringBuilder sb = new StringBuilder(); for (int i = seq.length()-1; i >= 0; i--) { sb.append(complement(seq.charAt(i))); } return sb.toString(); } /** * Resolves the appropriate IUPACNucleotide from the given callPair * (currCall, newCall) * * CurrCall is any valid IUPACNucleotide (except '+') while newCall is * restricted to A,C,G,T,-,N * * @param currCall, the current genotypic call from previous tag(s) at the * locus * @param newCall, the new allele from the current tag to be combined with * currCall to make a new genotype * @return resolved byte (valid IUPACNucleotide) */ public static byte resolveSNPByteFromCallPair(byte currCall, byte newCall) { byte snpByte; if (newCall == 'A') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'A'; break; case 'C': snpByte = 'M'; break; case 'G': snpByte = 'R'; break; case 'T': snpByte = 'W'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'M'; break; case 'R': snpByte = 'R'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'W'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'C') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'M'; break; case 'C': snpByte = 'C'; break; case 'G': snpByte = 'S'; break; case 'T': snpByte = 'Y'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'M'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'S'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'Y'; break; case '-': snpByte = '0'; 
break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'G') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'R'; break; case 'C': snpByte = 'S'; break; case 'G': snpByte = 'G'; break; case 'T': snpByte = 'K'; break; case 'K': snpByte = 'K'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'R'; break; case 'S': snpByte = 'S'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'T') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'W'; break; case 'C': snpByte = 'Y'; break; case 'G': snpByte = 'K'; break; case 'T': snpByte = 'T'; break; case 'K': snpByte = 'K'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'W'; break; case 'Y': snpByte = 'Y'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == '-') { // conflicts (more than 2 alleles) get set to N switch (currCall) { case 'A': snpByte = '0'; break; case 'C': snpByte = '0'; break; case 'G': snpByte = '0'; break; case 'T': snpByte = '0'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '-'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'N') { switch (currCall) { case 'A': snpByte = 'A'; break; case 'C': snpByte = 'C'; break; case 'G': snpByte = 'G'; break; case 'T': snpByte = 'T'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '-'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else { snpByte = 'N'; } return snpByte; } // Calculate QS and PL for VCF might not be in the correct class public static int[] calcScore (int a, int b) { int[] results= new int[4]; int n = a + b; int m = a; if (b > m) { m = b; } double fact = 0; if (n > m) { for (int i = n; i > m; i--) { fact += Math.log10(i); } for (int i = 1; i <= (n - m); i++){ fact -= Math.log10(i); } } double aad = Math.pow(10, fact + (double)a * v1 + (double)b * v2); double abd = Math.pow(10, fact + (double)n * v3); double bbd = Math.pow(10, fact + (double)b * v1 + 
(double)a * v2); double md = aad; if (md < abd) { md = abd; } if (md < bbd) { md = bbd; } int gq = 0; if ((aad + abd + bbd) > 0) { gq = (int)(md / (aad + abd + bbd) * 100); } int aa =(int) (-10 * (fact + (double)a * v1 + (double)b * v2)); int ab =(int) (-10 * (fact + (double)n * v3)); int bb =(int) (-10 * (fact + (double)b * v1 + (double)a * v2)); m = aa; if (m > ab) { m = ab; } if (m>bb) { m = bb; } aa -= m; ab -= m; bb -= m; results[0] = aa > 255 ? 255 : aa; results[1] = ab > 255 ? 255 : ab; results[2] = bb > 255 ? 255 : bb; results[3] = gq; return results; } private void initVCFScoreMap() { error = 0.001; v1 = Math.log10(1.0 - error * 3.0 /4.0); v2 = Math.log10(error/4); v3 = Math.log10(0.5 - (error/4.0)); myGenoScoreMap = new HashMap(); for (int i = 0; i < 255; i++) { for (int j = 0; j < 255; j++) { myGenoScoreMap.put(Integer.toString(i) + Integer.toString(j), calcScore(i, j)); } } } public int[] getScore(String key) { return myGenoScoreMap.get(key); } } class CustomSNPLog { private final BufferedWriter myWriter; private final String HEADER = "Chr" +"\t"+ "TagLocusStartPos" +"\t"+ "TagLocusStrand" +"\t"+ "SNPPosition" +"\t"+ "Alleles" +"\t"+ "nTagsAtLocus" +"\t"+ "nReads" +"\t"+ "nTaxa" +"\t"+ "nTaxaCovered" +"\t"+ "nInbreds" +"\t"+ "nInbredsCovered" +"\t"+ "nInbreds1Read" +"\t"+ "nInbreds1ReadMaj" +"\t"+ "nInbreds1ReadMin" +"\t"+ "nInbredsGT1Read" +"\t"+ "nInbredsGT1ReadHomoMaj" +"\t"+ "nInbredsGT1ReadHomoMin" +"\t"+ "nInbredHets" +"\t"+ "inbredCoverage" +"\t"+ "inbredHetScore" +"\t"+ "nOutbreds" +"\t"+ "nOutbredsCovered" +"\t"+ "nOutbreds1Read" +"\t"+ "nOutbreds1ReadMaj" +"\t"+ "nOutbreds1ReadMin" +"\t"+ "nOutbredsGT1Read" +"\t"+ "nOutbredsGT1ReadHomoMaj" +"\t"+ "nOutbredsGT1ReadHomoMin" +"\t"+ "nOutbredHets" +"\t"+ "passed?" +"\n"; public CustomSNPLog(String outHapMapFile, boolean append) { String logFileName; if (outHapMapFile.endsWith(".hmp.txt")) { logFileName = outHapMapFile.replace(".hmp.txt", ".customSNPLog.txt"); } else if (outHapMapFile.endsWith(".hmp.txt.gz")) { logFileName = outHapMapFile.replace(".hmp.txt.gz", ".customSNPLog.txt"); } else { logFileName = outHapMapFile + ".customSNPLog.txt"; } if ((logFileName == null) || (logFileName.length() == 0)) { myWriter = null; } else { boolean exists = false; File file = new File(logFileName); if (file.exists()) { exists = true; } myWriter = Utils.getBufferedWriter(logFileName, append); if (!exists || !append) { try { myWriter.append(HEADER); } catch (Exception e) { e.printStackTrace(); } } } } public void writeEntry(String entry) { try { myWriter.append(entry); } catch (Exception e) { e.printStackTrace(); } } public void close() { try { myWriter.close(); } catch (Exception e) { // do nothing; } } } class CustomSNPLogRecord { private int chr; private int tagLocusStartPos; private byte tagLocusStrand; private int snpPosition; private byte majAllele; private byte minAllele; private String alleles; private int nTagsAtLocus; private int nReads; private int nTaxa; private int nTaxaCovered; private int nInbreds; private int nInbredsCovered; private int nInbreds1Read; private int nInbreds1ReadMaj; private int nInbreds1ReadMin; private int nInbredsGT1Read; private int nInbredsGT1ReadHomoMaj; private int nInbredsGT1ReadHomoMin; private int nInbredHets; private int nOutbreds; private int nOutbredsCovered; private int nOutbreds1Read; private int nOutbreds1ReadMaj; private int nOutbreds1ReadMin; private int nOutbredsGT1Read; private int nOutbredsGT1ReadMajHomo; private int nOutbredsGT1ReadMinHomo; private int nOutbredHets; private double 
inbredCoverage; private double inbredHetScore; private boolean pass; private static final String DELIM = "\t"; public CustomSNPLogRecord(int site, TagsAtLocus myTAL, int position, boolean[] isInbred, boolean includeReference) { chr = myTAL.getChromosome(); tagLocusStartPos = myTAL.getMinStartPosition(); tagLocusStrand = myTAL.getStrand(); snpPosition = position; byte[][] byteAlleles = myTAL.getCommonAlleles(); majAllele= tagLocusStrand==-1? TagsToSNPByAlignmentPlugin.complementAllele(byteAlleles[0][site]) : byteAlleles[0][site]; minAllele= tagLocusStrand==-1? TagsToSNPByAlignmentPlugin.complementAllele(byteAlleles[1][site]) : byteAlleles[1][site]; alleles = NucleotideAlignmentConstants.NUCLEOTIDE_ALLELES[0][majAllele] + "/" + NucleotideAlignmentConstants.NUCLEOTIDE_ALLELES[0][minAllele]; nTagsAtLocus = (includeReference) ? myTAL.getSize()-1 : myTAL.getSize(); nReads = myTAL.getTotalNReads(); nTaxaCovered = myTAL.getNumberTaxaCovered(); getCounts(site, myTAL.getAlleleDepthsInTaxa(), isInbred); } private void getCounts(int site, byte[][][] alleleDepthsInTaxa, boolean[] isInbred) { nTaxa = alleleDepthsInTaxa[0][site].length; int genoDepth, nAlleles; boolean majPresent; for (int tx = 0; tx < nTaxa; tx++) { genoDepth = 0; nAlleles = 0; majPresent = false; if (isInbred == null || isInbred[tx]) { // if no pedigree file was used, assume that all taxa are inbred ++nInbreds; for (int a = 0; a < 2; a++) { int alleleDepth = alleleDepthsInTaxa[a][site][tx]; if (alleleDepth > 0) { genoDepth += alleleDepth; ++nAlleles; if (a == 0) majPresent = true; } } if (nAlleles > 0) { ++nInbredsCovered; if (genoDepth > 1) { ++nInbredsGT1Read; if (nAlleles > 1) ++nInbredHets; else if (majPresent) ++nInbredsGT1ReadHomoMaj; else ++nInbredsGT1ReadHomoMin; } else { ++nInbreds1Read; if (majPresent) ++nInbreds1ReadMaj; else ++nInbreds1ReadMin; } } } else { ++nOutbreds; for (int a = 0; a < 2; a++) { int alleleDepth = alleleDepthsInTaxa[a][site][tx]; if (alleleDepth > 0) { genoDepth += alleleDepth; ++nAlleles; if (a == 0) majPresent = true; } } if (nAlleles > 0) { ++nOutbredsCovered; if (genoDepth > 1) { ++nOutbredsGT1Read; if (nAlleles > 1) ++nOutbredHets; else if (majPresent) ++nOutbredsGT1ReadMajHomo; else ++nOutbredsGT1ReadMinHomo; } else { ++nOutbreds1Read; if (majPresent) ++nOutbreds1ReadMaj; else ++nOutbreds1ReadMin; } } } } inbredCoverage = (double) nInbredsCovered/nInbreds; inbredHetScore = (double) nInbredHets/(nInbredsGT1ReadHomoMin + nInbredHets + 0.5); if (inbredCoverage > 0.15 && inbredHetScore < 0.21) pass = true; // machine learning cutoffs set by Ed } public boolean isGoodSNP() { return pass; } public double getInbredCoverage() { return inbredCoverage; } public double getInbredHetScore() { return inbredHetScore; } public String toString() { StringBuilder sBuilder = new StringBuilder(); sBuilder.append(String.valueOf(chr)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(tagLocusStartPos)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(tagLocusStrand)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(snpPosition)); sBuilder.append(DELIM); sBuilder.append(alleles); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTagsAtLocus)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nReads)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTaxa)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTaxaCovered)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsCovered)); 
sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1ReadMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1ReadMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1ReadHomoMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1ReadHomoMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredHets)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(inbredCoverage)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(inbredHetScore)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsCovered)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1ReadMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1ReadMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1ReadMajHomo)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1ReadMinHomo)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredHets)); sBuilder.append(DELIM); if (pass) { sBuilder.append(String.valueOf(1)); } else { sBuilder.append(String.valueOf(0)); } sBuilder.append("\n"); return sBuilder.toString(); } } class TagLocusSiteQualityScores { private int[] siteIndicesInTAL; private double[] inbredCoverage; private double[] inbredHetScore; private byte[][] alleles; // [nSites][nAlleles] private int[] position; private int currSize; public TagLocusSiteQualityScores(int nSites) { siteIndicesInTAL = new int[nSites]; inbredCoverage = new double[nSites]; inbredHetScore = new double[nSites]; alleles = new byte[nSites][]; position = new int[nSites]; currSize = 0; } public void addSite(int siteIndex, double inbredCov, double inbredHetS, byte[] alleles, int position) { siteIndicesInTAL[currSize] = siteIndex; inbredCoverage[currSize] = inbredCov; inbredHetScore[currSize] = inbredHetS; this.alleles[currSize] = alleles; this.position[currSize] = position; ++currSize; } public int getSize() { return currSize; } public int getSiteInTAL(int site) { return siteIndicesInTAL[site]; } public byte[] getAlleles(int site) { return alleles[site]; } public int getPosition(int site) { return position[site]; } public void sortByQuality() { Swapper swapperQual = new Swapper() { public void swap(int a, int b) { int tempInt; tempInt = siteIndicesInTAL[a]; siteIndicesInTAL[a] = siteIndicesInTAL[b]; siteIndicesInTAL[b] = tempInt; double score = inbredCoverage[a]; inbredCoverage[a] = inbredCoverage[b]; inbredCoverage[b] = score; score = inbredHetScore[a]; inbredHetScore[a] = inbredHetScore[b]; inbredHetScore[b] = score; byte[] tempAlleles = alleles[a]; alleles[a] = alleles[b]; alleles[b] = tempAlleles; tempInt = position[a]; position[a] = position[b]; position[b] = tempInt; } }; IntComparator compQual = new IntComparator() { public int compare(int a, int b) { // reverse sort (high inbredCoverage is good) if (inbredCoverage[a] > inbredCoverage[b]) { return -1; } if (inbredCoverage[a] < inbredCoverage[b]) { return 1; } // normal sort (low inbredHetScore is good) if (inbredHetScore[a] < inbredHetScore[b]) { return -1; } if (inbredHetScore[a] > inbredHetScore[b]) { return 1; } // normal sort (low site indices are better because closer 
to start of read) if (siteIndicesInTAL[a] < siteIndicesInTAL[b]) { return -1; } if (siteIndicesInTAL[a] > siteIndicesInTAL[b]) { return 1; } return 0; } }; GenericSorting.quickSort(0, currSize, compQual, swapperQual); } }
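The calcScore(a, b) routine above derives VCF-style genotype likelihoods from the read depths of the two most common alleles at a site. As a cross-check, the following self-contained sketch recomputes the same quantities (PL-style values for hom-major, het, and hom-minor, plus a 0-100 GQ) outside the plugin; the class name GenotypeScoreSketch, the main() demo, and the hard-coded error rate are illustrative assumptions, not part of the TASSEL source.

// Hedged sketch: standalone recomputation of the genotype-likelihood math used in calcScore(a, b).
public class GenotypeScoreSketch {

    /** Returns {PL(AA), PL(AB), PL(BB), GQ} for a reads of the major and b reads of the minor allele. */
    public static int[] score(int a, int b, double error) {
        double v1 = Math.log10(1.0 - error * 3.0 / 4.0); // log10 P(read shows the true allele | homozygote)
        double v2 = Math.log10(error / 4.0);             // log10 P(read shows the other allele | homozygote)
        double v3 = Math.log10(0.5 - error / 4.0);       // log10 P(read shows either allele | heterozygote)
        int n = a + b, m = Math.max(a, b);
        double fact = 0;                                 // log10 of the binomial coefficient C(n, m)
        for (int i = n; i > m; i--) fact += Math.log10(i);
        for (int i = 1; i <= n - m; i++) fact -= Math.log10(i);
        double lAA = fact + a * v1 + b * v2;             // log10 likelihoods of the three genotypes
        double lAB = fact + n * v3;
        double lBB = fact + b * v1 + a * v2;
        double pAA = Math.pow(10, lAA), pAB = Math.pow(10, lAB), pBB = Math.pow(10, lBB);
        int gq = (int) (Math.max(pAA, Math.max(pAB, pBB)) / (pAA + pAB + pBB) * 100);
        int plAA = (int) (-10 * lAA), plAB = (int) (-10 * lAB), plBB = (int) (-10 * lBB);
        int min = Math.min(plAA, Math.min(plAB, plBB));  // normalize so the most likely genotype has PL = 0
        return new int[] {Math.min(plAA - min, 255), Math.min(plAB - min, 255), Math.min(plBB - min, 255), gq};
    }

    public static void main(String[] args) {
        // e.g., 5 reads of the major allele and 1 of the minor at an assumed error rate of 0.001
        System.out.println(java.util.Arrays.toString(score(5, 1, 0.001)));
    }
}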
src/net/maizegenetics/gbs/pipeline/TagsToSNPByAlignmentPlugin.java
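Before the source itself, a brief orientation on how this plugin is driven: the options parsed in setParameters() further below (-i, -y, -m, -o, -sC, -eC, and friends) can also be supplied programmatically. The following is a minimal, hypothetical driver; the file paths and the choice to pass null to performFunction() (whose body, as shown below, does not read its DataSet argument) are assumptions for illustration, and running it requires the TASSEL classes on the classpath.

import net.maizegenetics.gbs.pipeline.TagsToSNPByAlignmentPlugin;

// Hypothetical driver for the plugin below; all file paths are placeholders.
public class RunTagsToSNPByAlignmentSketch {
    public static void main(String[] args) {
        TagsToSNPByAlignmentPlugin plugin = new TagsToSNPByAlignmentPlugin();
        plugin.setParameters(new String[] {
                "-i", "/path/to/myStudy.tbt.byte", "-y",         // TagsByTaxa input, byte-formatted
                "-m", "/path/to/myStudy.topm.bin",               // TagsOnPhysicalMap with tag positions
                "-o", "/path/to/hapmap/myGBSGenos.chr+.hmp.txt", // '+' is the chromosome wildcard
                "-sC", "1", "-eC", "10"                          // start and end chromosomes
        });
        plugin.performFunction(null);                            // DataSet argument is unused in the body shown here
    }
}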
/*
 * TagsToSNPByAlignmentPlugin
 */
package net.maizegenetics.gbs.pipeline;

import cern.colt.GenericSorting;
import cern.colt.Swapper;
import cern.colt.function.IntComparator;
import java.awt.Frame;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.util.Arrays;
import java.util.HashMap;
import javax.swing.ImageIcon;
import net.maizegenetics.gbs.maps.TagsAtLocus;
import net.maizegenetics.gbs.maps.TagsOnPhysicalMap;
import net.maizegenetics.gbs.tagdist.TagsByTaxa;
import net.maizegenetics.gbs.tagdist.TagsByTaxaBitFileMap;
import net.maizegenetics.gbs.tagdist.TagsByTaxaByteFileMap;
import net.maizegenetics.gbs.tagdist.TagsByTaxaByteHDF5TagGroups;
import net.maizegenetics.util.ArgsEngine;
import net.maizegenetics.gbs.util.BaseEncoder;
import net.maizegenetics.pal.alignment.Alignment;
import net.maizegenetics.pal.alignment.AlignmentUtils;
import net.maizegenetics.pal.alignment.ExportUtils;
import net.maizegenetics.pal.alignment.Locus;
import net.maizegenetics.pal.alignment.MutableNucleotideAlignment;
import net.maizegenetics.pal.alignment.MutableVCFAlignment;
import net.maizegenetics.pal.alignment.NucleotideAlignmentConstants;
import net.maizegenetics.pal.ids.IdGroup;
import net.maizegenetics.pal.ids.SimpleIdGroup;
import net.maizegenetics.plugindef.AbstractPlugin;
import net.maizegenetics.plugindef.DataSet;
import net.maizegenetics.util.Utils;
import org.apache.log4j.Logger;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.SimpleLayout;
import org.biojava3.core.util.ConcurrencyTools;

/**
 * This class aligns tags at the same physical location against one another,
 * calls SNPs, and then outputs the SNPs to a HapMap file.
 *
 * It is multi-threaded, as there are substantial speed increases with it.
* * @author edbuckler */ public class TagsToSNPByAlignmentPlugin extends AbstractPlugin { static int maxSize = 200000; //normally 200K; private double minF = -2.0, minMAF = 0.01; private int minMAC = 10; // static boolean ignoreTriallelic=false; private boolean inclRare = false; // false = only call the two most common alleles at a site private boolean inclGaps = false; // false = ignore sites where the major or the 1st minor alleles are gaps private boolean callBiallelicSNPsWithGap = false; // true = call sites with a biallelic SNP plus a gap (e.g., A/C/-) private boolean isUpdateTOPM = false; private boolean useTBTByte = false; static double defaultMinPropTaxaWithLocus = 0.1; private static Logger myLogger = Logger.getLogger(TagsToSNPByAlignmentPlugin.class); TagsOnPhysicalMap theTOPM = null; TagsByTaxa theTBT = null; File inputFile = null; private String inTOPMFile = null; private String outTOPMFile = null; private boolean usePedigree = false; HashMap<String, Double> taxaFs = null; boolean[] useTaxaForMinF = null; int nInbredTaxa = Integer.MIN_VALUE; String suppliedOutputFileName; boolean vcf = false; int startChr = Integer.MAX_VALUE; int endChr = Integer.MIN_VALUE; private static ArgsEngine myArgsEngine = null; int minTaxaWithLocus; private double errorRate = 0.01; private boolean includeReference = false; private String refGenomeFileStr = null; private long[] refGenomeChr = null; private boolean fuzzyStartPositions = false; int locusBorder = 0; final static int CHR = 0, STRAND = 1, START_POS = 2; // indices of these position attributes in array returned by theTOPM.getPositionArray(a) private boolean customSNPLogging = true; // a custom SNP log that collects useful info for filtering SNPs through machine learning criteria private CustomSNPLog myCustomSNPLog = null; private boolean customFiltering = true; // variables for calculating OS and PL for VCF, might not be in the correct class private static double error; private static double v1; private static double v2; private static double v3; private static HashMap<String, int[]> myGenoScoreMap; public TagsToSNPByAlignmentPlugin() { super(null, false); } public TagsToSNPByAlignmentPlugin(Frame parentFrame) { super(parentFrame, false); } @Override public DataSet performFunction(DataSet input) { myLogger.info("Finding SNPs in " + inputFile.getAbsolutePath() + "."); myLogger.info(String.format("StartChr:%d EndChr:%d %n", startChr, endChr)); theTOPM.sortTable(true); myLogger.info("\nAs a check, here are the first 5 tags in the TOPM (sorted by position):"); theTOPM.printRows(5, true, true); for (int chr = startChr; chr <= endChr; chr++) { myLogger.info("\n\nProcessing chromosome " + chr + "..."); String out = suppliedOutputFileName.replace("+", "" + chr); if (customSNPLogging) myCustomSNPLog = new CustomSNPLog(out, false); myLogger.info("Creating Mutable Alignment to hold genotypes for chr" + chr + " (maximum number of sites = " + maxSize + ")"); MutableNucleotideAlignment theMSA = vcf ? 
createMutableVCFAlignment(theTBT, maxSize + 100, includeReference) : createMutableAlignment(theTBT, maxSize + 100, includeReference); if (includeReference) { refGenomeChr = readReferenceGenomeChr(refGenomeFileStr, chr); if (refGenomeChr == null) continue; } runTagsToSNPByAlignment(theMSA, out, chr, false); if (customSNPLogging) myCustomSNPLog.close(); myLogger.info("Finished processing chromosome " + chr + "\n\n"); } if (this.isUpdateTOPM) { if (outTOPMFile.endsWith(".txt")) { theTOPM.writeTextFile(new File(outTOPMFile)); } else { theTOPM.writeBinaryFile(new File(outTOPMFile)); } } ConcurrencyTools.shutdown(); return null; } private void printUsage() { myLogger.info( "\n\n\nThe available options for the TagsToSNPByAlignmentPlugin are as follows:\n" + "-i Input .tbt file\n" + "-y Use byte-formatted TBT file (*.tbt.byte)\n" + "-m TagsOnPhysicalMap file containing genomic positions of tags\n" + "-mUpd Update TagsOnPhysicalMap file with allele calls for Production Pipeline, save to specified file (default: no updating)\n" + "-o Output HapMap file. Use a plus sign (+) as a wild card character in place of the chromosome number\n" + " (e.g., /path/hapmap/myGBSGenos.chr+.hmp.txt)\n" + "-vcf Output a VCF file (*.vcf) as well as the default HapMap (*.hmp.txt) (default: "+vcf+")\n" + "-mxSites Maximum number of sites (SNPs) output per chromosome (default: " + maxSize + ")\n" + "-mnF Minimum F (inbreeding coefficient) (default: " + minF + " = no filter)\n" + "-p Pedigree file containing full sample names (or expected names after merging) & expected inbreeding\n" + " coefficient (F) for each. Only taxa with expected F >= mnF used to calculate F = 1-Ho/He.\n" + " (default: use ALL taxa to calculate F)\n" + "-mnMAF Minimum minor allele frequency (default: " + minMAF + ")\n" + "-mnMAC Minimum minor allele count (default: " + minMAC + ")\n" + "-mnLCov Minimum locus coverage (proportion of Taxa with a genotype) (default: " + defaultMinPropTaxaWithLocus + ")\n" + "-errRate Average sequencing error rate per base (used to decide between heterozygous and homozygous calls) (default: "+errorRate+")\n" + "-ref Path to reference genome in fasta format. Ensures that a tag from the reference genome is always included\n" + " when the tags at a locus are aligned against each other to call SNPs. 
The reference allele for each site\n" + " is then provided in the output HapMap files, under the taxon name \"REFERENCE_GENOME\" (first taxon).\n" + " DEFAULT: Don't use reference genome.\n" // + "-LocusBorder All tags on either strand with start postions that differ by less than the specified\n" // + " integer (LocusBorder) are aligned to the reference genome to call SNPs at a locus.\n" // + " By default (without the -LocusBorder option), only tags with identical start postions and\n" // + " strand are grouped as a locus.\n" // + " Use of the -LocusBorder option requires that the -ref option is also invoked.\n" + "-inclRare Include the rare alleles at site (3 or 4th states) (default: " + inclRare + ")\n" + "-inclGaps Include sites where major or minor allele is a GAP (default: " + inclGaps + ")\n" + "-callBiSNPsWGap Include sites where the third allele is a GAP (default: " + callBiallelicSNPsWithGap + ") (mutually exclusive with inclGaps)\n" + "-sC Start chromosome\n" + "-eC End chromosome\n\n\n"); } @Override public void setParameters(String[] args) { myLogger.addAppender(new ConsoleAppender(new SimpleLayout())); if (args.length == 0) { printUsage(); throw new IllegalArgumentException("\n\nPlease use the above arguments/options.\n\n"); } if (myArgsEngine == null) { myArgsEngine = new ArgsEngine(); myArgsEngine.add("-i", "--input-file", true); myArgsEngine.add("-y", "--useTBTByte", false); myArgsEngine.add("-m", "--physical-map", true); myArgsEngine.add("-mUpd", "--update-physical-map", true); myArgsEngine.add("-o", "--output-directory", true); myArgsEngine.add("-vcf", "--output_vcf", false); myArgsEngine.add("-mxSites", "--max-sites-per-chr", true); myArgsEngine.add("-mnF", "--minFInbreeding", true); myArgsEngine.add("-p", "--pedigree-file", true); myArgsEngine.add("-mnMAF", "--minMinorAlleleFreq", true); myArgsEngine.add("-mnMAC", "--minMinorAlleleCount", true); myArgsEngine.add("-mnLCov", "--minLocusCov", true); myArgsEngine.add("-errRate", "--seqErrRate", true); myArgsEngine.add("-ref", "--referenceGenome", true); // myArgsEngine.add("-LocusBorder", "--locus-border", true); myArgsEngine.add("-inclRare", "--includeRare", false); myArgsEngine.add("-inclGaps", "--includeGaps", false); myArgsEngine.add("-callBiSNPsWGap", "--callBiSNPsWGap", false); myArgsEngine.add("-sC", "--start-chromosome", true); myArgsEngine.add("-eC", "--end-chromosome", true); } myArgsEngine.parse(args); if (myArgsEngine.getBoolean("-y")) { useTBTByte = true; } if (myArgsEngine.getBoolean("-i")) { String inputFileName = myArgsEngine.getString("-i"); inputFile = new File(inputFileName); if (!inputFile.exists() || !inputFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the TagsByTaxa input file (-i option: " + myArgsEngine.getString("-i") + ")."); } if (inputFileName.endsWith(".hdf") || inputFileName.endsWith(".h5")) { theTBT = new TagsByTaxaByteHDF5TagGroups(inputFileName); } else if (useTBTByte) { theTBT = new TagsByTaxaByteFileMap(inputFileName); } else { theTBT = new TagsByTaxaBitFileMap(inputFileName); } } else { printUsage(); throw new IllegalArgumentException("Please specify a TagsByTaxa input file (-i option)."); } if (myArgsEngine.getBoolean("-m")) { inTOPMFile = myArgsEngine.getString("-m"); File inTOPMFileTest = new File(inTOPMFile); if (!inTOPMFileTest.exists() || !inTOPMFileTest.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the TOPM input file (-m option: " + inTOPMFile + ")."); } inTOPMFileTest = null; boolean loadBinary = 
(inTOPMFile.endsWith(".txt")) ? false : true; theTOPM = new TagsOnPhysicalMap(inTOPMFile, loadBinary); } else { printUsage(); throw new IllegalArgumentException("Please specify a physical map file."); } if (myArgsEngine.getBoolean("-mUpd")) { this.isUpdateTOPM = true; this.outTOPMFile = myArgsEngine.getString("-mUpd"); } if (myArgsEngine.getBoolean("-o")) { suppliedOutputFileName = myArgsEngine.getString("-o"); boolean noWildCard = false; if (suppliedOutputFileName.contains(File.separator)) { if (!suppliedOutputFileName.substring(suppliedOutputFileName.lastIndexOf(File.separator)).contains("+")) { noWildCard = true; } } else if (!suppliedOutputFileName.contains("+")) { noWildCard = true; } if (noWildCard) { printUsage(); throw new IllegalArgumentException("The output file name should contain a \"+\" wildcard character in place of the chromosome number (-o option: " + suppliedOutputFileName + ")"); } String outFolder = suppliedOutputFileName.substring(0,suppliedOutputFileName.lastIndexOf(File.separator)); File outDir = new File(outFolder); try { if (!outDir.getCanonicalFile().isDirectory()) { throw new Exception(); } } catch (Exception e) { printUsage(); throw new IllegalArgumentException("Path to the output file does not exist (-o option: " + suppliedOutputFileName + ")"); } } if (myArgsEngine.getBoolean("-vcf")) { vcf = true; initVCFScoreMap(); } if (myArgsEngine.getBoolean("-mxSites")) { maxSize = Integer.parseInt(myArgsEngine.getString("-mxSites")); } if (myArgsEngine.getBoolean("-mnF")) { minF = Double.parseDouble(myArgsEngine.getString("-mnF")); } if (myArgsEngine.getBoolean("-p")) { String pedigreeFileStr = myArgsEngine.getString("-p"); File pedigreeFile = new File(pedigreeFileStr); if (!pedigreeFile.exists() || !pedigreeFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the pedigree input file (-p option: " + pedigreeFileStr + ")."); } taxaFs = readTaxaFsFromFile(pedigreeFile); if (taxaFs == null) { throw new IllegalArgumentException("Problem reading the pedigree file. Progam aborted."); } if (!maskNonInbredTaxa()) { throw new IllegalArgumentException("Mismatch between taxa names in the pedigree file and TBT. 
Progam aborted."); } usePedigree = true; } if (myArgsEngine.getBoolean("-mnMAF")) { minMAF = Double.parseDouble(myArgsEngine.getString("-mnMAF")); } if (myArgsEngine.getBoolean("-mnMAC")) { minMAC = Integer.parseInt(myArgsEngine.getString("-mnMAC")); } minTaxaWithLocus = (int) Math.round(theTBT.getTaxaCount() * defaultMinPropTaxaWithLocus); if (myArgsEngine.getBoolean("-mnLCov")) { double minPropTaxaWithLocus = Double.parseDouble(myArgsEngine.getString("-mnLCov")); minTaxaWithLocus = (int) Math.round(theTBT.getTaxaCount() * minPropTaxaWithLocus); } if (myArgsEngine.getBoolean("-errRate")) { errorRate = Double.parseDouble(myArgsEngine.getString("-errRate")); } if (myArgsEngine.getBoolean("-ref")) { refGenomeFileStr = myArgsEngine.getString("-ref"); File refGenomeFile = new File(refGenomeFileStr); if (!refGenomeFile.exists() || !refGenomeFile.isFile()) { printUsage(); throw new IllegalArgumentException("Can't find the reference genome fasta file (-ref option: " + refGenomeFileStr + ")."); } includeReference = true; refGenomeFile = null; System.gc(); } // the (experimental) -LocusBorder option is not properly implemented yet in Tassel4 // if (myArgsEngine.getBoolean("-LocusBorder")) { // if (!includeReference) { // printUsage(); // throw new IllegalArgumentException("The -LocusBorder option requires that the -ref option (referenceGenome) is also invoked."); // } // if (vcf) { // printUsage(); // throw new IllegalArgumentException("The -LocusBorder option is currently incompatible with the -vcf option."); // } // locusBorder = Integer.parseInt(myArgsEngine.getString("-LocusBorder")); // fuzzyStartPositions = true; // } if (myArgsEngine.getBoolean("-inclRare")) { inclRare = true; } if (myArgsEngine.getBoolean("-inclGaps")) { inclGaps = true; } if (myArgsEngine.getBoolean("-callBiSNPsWGap")) { if (inclGaps) { printUsage(); throw new IllegalArgumentException("The callBiSNPsWGap option is mutually exclusive with the inclGaps option."); } else { callBiallelicSNPsWithGap = true; } } if (myArgsEngine.getBoolean("-sC")) { startChr = Integer.parseInt(myArgsEngine.getString("-sC")); } else { printUsage(); throw new IllegalArgumentException("Please specify start and end chromosome numbers."); } if (myArgsEngine.getBoolean("-eC")) { endChr = Integer.parseInt(myArgsEngine.getString("-eC")); } else { printUsage(); throw new IllegalArgumentException("Please specify start and end chromosome numbers."); } if (endChr - startChr < 0) { printUsage(); throw new IllegalArgumentException("Error: The start chromosome is larger than the end chromosome."); } myLogger.info(String.format("minTaxaWithLocus:%d MinF:%g MinMAF:%g MinMAC:%d %n", minTaxaWithLocus, minF, minMAF, minMAC)); myLogger.info(String.format("includeRare:%s includeGaps:%s %n", inclRare, inclGaps)); } @Override public ImageIcon getIcon() { throw new UnsupportedOperationException("Not supported yet."); } @Override public String getButtonName() { throw new UnsupportedOperationException("Not supported yet."); } @Override public String getToolTipText() { throw new UnsupportedOperationException("Not supported yet."); } public void runTagsToSNPByAlignment(MutableNucleotideAlignment theMSA, String outHapMap, int targetChromo, boolean requireGeneticSupport) { long time = System.currentTimeMillis(); DataOutputStream locusLogDOS = openLocusLog(outHapMap); TagsAtLocus currTAL = new TagsAtLocus(Integer.MIN_VALUE,Byte.MIN_VALUE,Integer.MIN_VALUE,Integer.MIN_VALUE,includeReference,fuzzyStartPositions,errorRate); int[] currPos = null; int countLoci = 0; for (int i = 0; 
(i < theTOPM.getSize()) && (theMSA.getSiteCount() < (maxSize - 1000)); i++) { int ri = theTOPM.getReadIndexForPositionIndex(i); // process tags in order of physical position int[] newPos = theTOPM.getPositionArray(ri); if (newPos[CHR] != targetChromo) continue; //Skip tags from other chromosomes if (requireGeneticSupport && (theTOPM.getMapP(ri) < 2)) continue; //Skip tags with low mapP scores if ((fuzzyStartPositions && nearbyTag(newPos, currPos)) || Arrays.equals(newPos, currPos)) { currTAL.addTag(ri, theTOPM, theTBT, includeReference, fuzzyStartPositions); } else { int nTaxaCovered = currTAL.getNumberTaxaCovered(); if (currTAL.getSize()>1 && nTaxaCovered >= minTaxaWithLocus) { // finish the current TAL addSitesToMutableAlignment(currTAL, theMSA,locusLogDOS); // note that with fuzzyStartPositions there may be no overlapping tags!! countLoci++; if (theMSA.getSiteCount() % 100 == 0) { double rate = (double) theMSA.getSiteCount() / (double) (System.currentTimeMillis() - time); myLogger.info(String.format( "Chr:%d Pos:%d Loci=%d SNPs=%d rate=%g SNP/millisec %n", currPos[CHR], currPos[START_POS], countLoci, theMSA.getSiteCount(), rate)); } } else if (currPos!=null) { logRejectedTagLocus(currTAL,locusLogDOS); } currPos = newPos; // start a new TAL with the current tag if ((currPos[STRAND] != TagsOnPhysicalMap.byteMissing) && (currPos[START_POS] != TagsOnPhysicalMap.intMissing)) { // we already know that currPos[CHR]==targetChromo currTAL = new TagsAtLocus(currPos[CHR],(byte) currPos[STRAND],currPos[START_POS],theTOPM.getTagLength(ri),includeReference,fuzzyStartPositions,errorRate); currTAL.addTag(ri, theTOPM, theTBT, includeReference, fuzzyStartPositions); } else { currPos = null; // invalid position } } } if ((currTAL.getSize() > 1) && (currTAL.getNumberTaxaCovered() >= minTaxaWithLocus)) { // then finish the final TAL for the targetChromo addSitesToMutableAlignment(currTAL, theMSA,locusLogDOS); } else if (currPos!=null) { logRejectedTagLocus(currTAL,locusLogDOS); } if (theMSA.getSiteCount() > 0) { theMSA.clean(); ExportUtils.writeToHapmap(theMSA, false, outHapMap, '\t', null); if (vcf) { String vcfFileName; if (outHapMap.endsWith(".hmp.txt")) { vcfFileName = outHapMap.replace(".hmp.txt", ".vcf"); } else if (outHapMap.endsWith(".hmp.txt.gz")) { vcfFileName = outHapMap.replace(".hmp.txt.gz", ".vcf.gz"); } else { vcfFileName = outHapMap + ".vcf"; } ExportUtils.writeToVCF(theMSA, vcfFileName, '\t'); } } myLogger.info("Number of marker sites recorded for chr" + targetChromo + ": " + theMSA.getSiteCount()); try{ locusLogDOS.close(); } catch(Exception e) { catchLocusLogException(e); } } /** * Creates a MutableNucleotideAlignment based on the taxa in a TBT. */ private static MutableNucleotideAlignment createMutableAlignment(TagsByTaxa theTBT, int maxSites, boolean includeReference) { String[] taxaNames; if (includeReference) { int nTaxa = theTBT.getTaxaNames().length + 1; taxaNames = new String[nTaxa]; taxaNames[0] = "REFERENCE_GENOME"; // will hold the "genotype" of the reference genome for (int t = 1; t < nTaxa; t++) { taxaNames[t] = theTBT.getTaxaName(t-1); } } else { taxaNames = theTBT.getTaxaNames(); } IdGroup taxa = new SimpleIdGroup(taxaNames); MutableNucleotideAlignment theMSA = MutableNucleotideAlignment.getInstance(taxa, 0, taxa.getIdCount(), maxSites); return theMSA; } /** * Same as above method. 
Creates a MutableVCFAlignment */ private static MutableVCFAlignment createMutableVCFAlignment(TagsByTaxa theTBT, int maxSites, boolean includeReference) { String[] taxaNames; if (includeReference) { int nTaxa = theTBT.getTaxaNames().length + 1; taxaNames = new String[nTaxa]; taxaNames[0] = "REFERENCE_GENOME"; // will hold the "genotype" of the reference genome for (int t = 1; t < nTaxa; t++) { taxaNames[t] = theTBT.getTaxaName(t-1); } } else { taxaNames = theTBT.getTaxaNames(); } IdGroup taxa = new SimpleIdGroup(taxaNames); MutableVCFAlignment theMVA = MutableVCFAlignment.getInstance(taxa, 0, taxa.getIdCount(), maxSites); return theMVA; } boolean nearbyTag(int[] newTagPos, int[] currTagPos) { if (newTagPos == null || currTagPos == null) { return false; } // because we move through the TOPM in positional order, the newTag startPosition is guaranteed to be >= that of the current tag if (newTagPos[CHR] == currTagPos[CHR] && newTagPos[START_POS] - currTagPos[START_POS] < locusBorder) { // &&newTagPos[STRAND]==currTagPos[STRAND] // grab all of the tags that align to a local region (until a gap > tolerance is reached) currTagPos[START_POS] = newTagPos[START_POS]; return true; } return false; } private synchronized void addSitesToMutableAlignment(TagsAtLocus theTAL, MutableNucleotideAlignment theMSA, DataOutputStream locusLogDOS) { boolean refTagUsed = false; byte[][][] alleleDepths = null; byte[][] commonAlleles = null; if (theTAL.getSize() < 2) { logRejectedTagLocus(theTAL,locusLogDOS); return; // need at least two (overlapping!) sequences to make an alignment } byte[][] callsBySite; if (vcf) { if (includeReference) { addRefTag(theTAL); refTagUsed = true; } callsBySite = theTAL.getSNPCallsVCF(callBiallelicSNPsWithGap, myGenoScoreMap, includeReference); alleleDepths = theTAL.getAlleleDepthsInTaxa(); commonAlleles = theTAL.getCommonAlleles(); } else if (includeReference) { if (fuzzyStartPositions) { String refSeqInRegion = getRefSeqInRegion(theTAL); callsBySite = theTAL.getSNPCallsQuant(refSeqInRegion, callBiallelicSNPsWithGap); } else { addRefTag(theTAL); refTagUsed = true; callsBySite = theTAL.getSNPCallsQuant(callBiallelicSNPsWithGap, includeReference); } } else { callsBySite = theTAL.getSNPCallsQuant(callBiallelicSNPsWithGap, includeReference); } if (callsBySite == null) { logAcceptedTagLocus(theTAL.getLocusReport(minTaxaWithLocus, null), locusLogDOS); return; } int[] positionsInLocus = theTAL.getPositionsOfVariableSites(); int strand = theTAL.getStrand(); boolean[] varSiteKept = new boolean[callsBySite.length]; // initializes to false TagLocusSiteQualityScores SiteQualityScores = new TagLocusSiteQualityScores(callsBySite.length); for (int s = 0; s < callsBySite.length; s++) { byte[] alleles = null; if ((alleles = isSiteGood(callsBySite[s])) == null) { // NOTE: only the maj & min1 alleles are returned, so the Prod Pipeline can only call 2 alleles continue; } if (includeReference && !fuzzyStartPositions && theTAL.getRefGeno(s) == NucleotideAlignmentConstants.GAP_DIPLOID_ALLELE) { continue; } int position = (strand == -1) ? 
theTAL.getMinStartPosition() - positionsInLocus[s] : theTAL.getMinStartPosition() + positionsInLocus[s]; CustomSNPLogRecord mySNPLogRecord; if (customSNPLogging) { mySNPLogRecord = new CustomSNPLogRecord(s, theTAL, position, useTaxaForMinF, refTagUsed); myCustomSNPLog.writeEntry(mySNPLogRecord.toString()); SiteQualityScores.addSite(s, mySNPLogRecord.getInbredCoverage(), mySNPLogRecord.getInbredHetScore(), alleles, position); if (customFiltering && !mySNPLogRecord.isGoodSNP()) { continue; } } varSiteKept[s] = true; int currSite = theMSA.getSiteCount(); theMSA.addSite(currSite); String chromosome = String.valueOf(theTAL.getChromosome()); theMSA.setLocusOfSite(currSite, new Locus(chromosome, chromosome, -1, -1, null, null)); theMSA.setPositionOfSite(currSite, position); int offset = 0; if (includeReference && !fuzzyStartPositions) { offset = 1; byte geno = (strand == -1) ? complementGeno(theTAL.getRefGeno(s)) : theTAL.getRefGeno(s); theMSA.setBase(0, currSite, geno); theMSA.setReferenceAllele(currSite, geno); if (vcf) { byte[] depths = new byte[]{0,0,0}; // assumes maxNumAlleles = 3 theMSA.setDepthForAlleles(0, currSite, depths); } } for (int tx = 0; tx < theTBT.getTaxaCount(); tx++) { if (callsBySite[s][tx] != Alignment.UNKNOWN_DIPLOID_ALLELE && strand == -1) { theMSA.setBase(tx+offset, currSite, complementGeno(callsBySite[s][tx])); // complement to plus strand } else { theMSA.setBase(tx+offset, currSite, callsBySite[s][tx]); } if (vcf) { byte[] depths = new byte[alleleDepths.length]; for (int a = 0; a < depths.length; a++) { depths[a] = alleleDepths[a][s][tx]; } theMSA.setDepthForAlleles(tx+offset, currSite, depths); } } if (vcf) { byte[] allelesForSite = new byte[commonAlleles.length]; for (int a = 0; a < allelesForSite.length; a++) { if (strand == -1) allelesForSite[a] = complementAllele(commonAlleles[a][s]); else allelesForSite[a] = commonAlleles[a][s]; } theMSA.setCommonAlleles(currSite, allelesForSite); } if (isUpdateTOPM & !customFiltering) { updateTOPM(theTAL, s, position, strand, alleles); } if (currSite % 100 == 0) { System.out.printf("Site:%d Position:%d %n", currSite, position); } } logAcceptedTagLocus(theTAL.getLocusReport(minTaxaWithLocus, varSiteKept), locusLogDOS); if (isUpdateTOPM & customFiltering) { updateTOPM(theTAL, varSiteKept, SiteQualityScores); } } private void updateTOPM(TagsAtLocus myTAL, boolean[] varSiteKept, TagLocusSiteQualityScores SiteQualityScores) { SiteQualityScores.sortByQuality(); byte strand = myTAL.getStrand(); for (int s = 0; s < SiteQualityScores.getSize(); s++) { int siteInTAL = SiteQualityScores.getSiteInTAL(s); if (varSiteKept[siteInTAL]) { for (int tg = 0; tg < myTAL.getSize(); tg++) { int topmTagIndex = myTAL.getTOPMIndexOfTag(tg); if (topmTagIndex == Integer.MIN_VALUE) continue; // skip the reference genome tag (which may not be in the TOPM) byte baseToAdd = myTAL.getCallAtVariableSiteForTag(siteInTAL, tg); boolean matched = false; for (byte cb : SiteQualityScores.getAlleles(s)) { if (baseToAdd == cb) { matched = true; break; } } // so that all tags in the tagAlignment have the same corresponding variants in the TOPM, add a variant no matter what (set to missing if needed) byte offset = (byte) (SiteQualityScores.getPosition(s) - myTAL.getMinStartPosition()); if (!matched) { baseToAdd = Alignment.UNKNOWN_DIPLOID_ALLELE; } if (strand == -1) { baseToAdd = complementAllele(baseToAdd); // record everything relative to the plus strand } // convert from allele from 0-15 style to IUPAC ASCII character value (e.g., (byte) 'A') baseToAdd = 
getIUPACAllele(baseToAdd); theTOPM.addVariant(topmTagIndex, offset, baseToAdd); } } } } private void updateTOPM(TagsAtLocus myTAL, int variableSite, int position, int strand, byte[] alleles) { for (int tg = 0; tg < myTAL.getSize(); tg++) { byte baseToAdd = myTAL.getCallAtVariableSiteForTag(variableSite, tg); // NOTE: // -"alleles" contains only the maj & min1 alleles, so the Prod Pipeline can only call 2 alleles at a site // -"alleles" are coded as (byte) 0 to 15 (tassel4 encoding). So is baseToAdd, so they are matching // -this means that a production TOPM from Tassel3 cannot be used in Tassel4 boolean matched = false; for (byte cb : alleles) { if (baseToAdd == cb) { matched = true; break; } } // so that all tags in the tagAlignment have the same corresponding variants in the TOPM, add a variant no matter what (set to missing if needed) int topmTagIndex = myTAL.getTOPMIndexOfTag(tg); byte offset = (byte) (position - myTAL.getMinStartPosition()); if (!matched) { baseToAdd = Alignment.UNKNOWN_DIPLOID_ALLELE; } if (strand == -1) { baseToAdd = complementAllele(baseToAdd); // record everything relative to the plus strand } theTOPM.addVariant(topmTagIndex, offset, baseToAdd); } } /** * * @param calls * @return */ private byte[] isSiteGood(byte[] calls) { int[][] alleles = AlignmentUtils.getAllelesSortedByFrequency(calls); if (alleles[0].length < 2) { return null; // quantitative SNP calling rendered the site invariant } int aCnt = alleles[1][0] + alleles[1][1]; double theMAF = (double) alleles[1][1] / (double) aCnt; if ((theMAF < minMAF) && (alleles[1][1] < minMAC)) return null; // note that a site only needs to pass one of the criteria, minMAF &/or minMAC byte majAllele = (byte) alleles[0][0]; byte minAllele = (byte) alleles[0][1]; if (!inclGaps && ((majAllele == NucleotideAlignmentConstants.GAP_ALLELE) || (minAllele == NucleotideAlignmentConstants.GAP_ALLELE))) { return null; } byte homMaj = (byte) ((majAllele << 4) | majAllele); byte homMin = (byte) ((minAllele << 4) | minAllele); byte hetG1 = AlignmentUtils.getDiploidValue(majAllele, minAllele); byte hetG2 = AlignmentUtils.getDiploidValue(minAllele, majAllele); if (minF > -1.0) { // only test for minF if the parameter has been set above the theoretical minimum double obsF = calculateF(calls, alleles, hetG1, hetG2, theMAF); if (obsF < minF) return null; } return getGoodAlleles(calls,alleles,homMaj,homMin,hetG1,hetG2,majAllele,minAllele); } private double calculateF(byte[] calls, int[][] alleles, byte hetG1, byte hetG2, double theMAF) { boolean report = false; double obsF; int hetGCnt = 0; if (usePedigree) { byte[] callsToUse = filterCallsForInbreds(calls); //int[][] allelesToUse = getSortedAlleleCounts(callsToUse); int[][] allelesToUse = AlignmentUtils.getAllelesSortedByFrequency(callsToUse); if (allelesToUse[0].length < 2) { return 1.0; // lack of variation in the known inbreds will NOT reject a SNP } int aCnt = allelesToUse[1][0] + allelesToUse[1][1]; double newMAF = (double) allelesToUse[1][1] / (double) aCnt; if (newMAF <= 0.0) { return 1.0; // lack of variation in the known inbreds will NOT reject a SNP } byte majAllele = (byte) allelesToUse[0][0]; byte minAllele = (byte) allelesToUse[0][1]; //byte newHetG = IUPACNucleotides.getDegerateSNPByteFromTwoSNPs(majAllele, minAllele); byte newHetG1 = AlignmentUtils.getDiploidValue(majAllele, minAllele); byte newHetG2 = AlignmentUtils.getDiploidValue(minAllele, majAllele); for (byte i : callsToUse) { if (i == newHetG1 || i == newHetG2) { hetGCnt++; } } int majGCnt = (allelesToUse[1][0] - 
hetGCnt) / 2; // number of homozygous major allele genotypes int minGCnt = (allelesToUse[1][1] - hetGCnt) / 2; // number of homozygous minor allele genotypes double propHets = (double) hetGCnt / (double) (hetGCnt + majGCnt + minGCnt); double expHets = 2.0 * newMAF * (1 - newMAF); obsF = 1.0 - (propHets / expHets); if (report) { System.out.printf("%d %d %d propHets:%g expHets:%g obsF:%g %n", majGCnt, minGCnt, hetGCnt, propHets, expHets, obsF); } return obsF; } else { for (byte i : calls) { if (i == hetG1 || i == hetG2) { hetGCnt++; } } int majGCnt = (alleles[1][0] - hetGCnt) / 2; // number of homozygous major allele genotypes int minGCnt = (alleles[1][1] - hetGCnt) / 2; // number of homozygous minor allele genotypes double propHets = (double) hetGCnt / (double) (hetGCnt + majGCnt + minGCnt); double expHets = 2.0 * theMAF * (1 - theMAF); obsF = 1.0 - (propHets / expHets); if (report) { System.out.printf("%d %d %d propHets:%g expHets:%g obsF:%g %n", majGCnt, minGCnt, hetGCnt, propHets, expHets, obsF); } return obsF; } } private byte[] getGoodAlleles(byte[] calls,int[][] alleles,byte homMaj,byte homMin,byte hetG1,byte hetG2,byte majAllele,byte minAllele) { if (inclRare) { byte[] byteAlleles = new byte[alleles[0].length]; for (int a = 0; a < alleles[0].length; a++) { byteAlleles[a] = (byte) alleles[0][a]; } return byteAlleles; } else { setBadCallsToMissing(calls,homMaj,homMin,hetG1,hetG2,majAllele,minAllele); alleles = AlignmentUtils.getAllelesSortedByFrequency(calls); // the allele frequency & number of alleles may have been altered by setBadCallsToMissing() if (alleles[0].length < 2) { return null; // setBadCallsToMissing() rendered the site invariant } else if (alleles[0].length == 2) { return getMajMinAllelesOnly(alleles); } else { if (callBiallelicSNPsWithGap) { boolean hasGap = false; for (int a = 2; a < alleles[0].length; a++) { // NOTE: the maj & min alleles are not checked (we know that they are NOT gap (inclGaps mutually exclusive with callBiallelicSNPsWithGap) if (((byte) alleles[0][a]) == NucleotideAlignmentConstants.GAP_ALLELE) { hasGap = true; break; } } if (hasGap) { byte[] byteAlleles = new byte[3]; byteAlleles[0] = (byte) alleles[0][0]; byteAlleles[1] = (byte) alleles[0][1]; byteAlleles[2] = NucleotideAlignmentConstants.GAP_ALLELE; return byteAlleles; } else { return getMajMinAllelesOnly(alleles); } } else { return getMajMinAllelesOnly(alleles); } } } } private byte[] getMajMinAllelesOnly(int[][] alleles) { byte[] byteAlleles = new byte[2]; byteAlleles[0] = (byte) alleles[0][0]; byteAlleles[1] = (byte) alleles[0][1]; return byteAlleles; } private void setBadCallsToMissing(byte[] calls, byte homMaj, byte homMin, byte hetG1, byte hetG2, byte majAllele, byte minAllele) { if (callBiallelicSNPsWithGap) { for (int i = 0; i < calls.length; i++) { if (isGoodBiallelicWithGapCall(calls[i],homMaj,homMin,hetG1,hetG2,majAllele,minAllele)) { continue; } else { calls[i] = Alignment.UNKNOWN_DIPLOID_ALLELE; } } } else { for (int i = 0; i < calls.length; i++) { if ((calls[i] == homMaj) || (calls[i] == homMin) || (calls[i] == hetG1) || (calls[i] == hetG2)) { continue; } else { calls[i] = Alignment.UNKNOWN_DIPLOID_ALLELE; } } } } private boolean isGoodBiallelicWithGapCall(byte call, byte homMaj, byte homMin, byte hetG1, byte hetG2, byte majAllele, byte minAllele) { if (call == homMaj) return true; if (call == homMin) return true; if (call == hetG1) return true; if (call == hetG2) return true; if (call == AlignmentUtils.getDiploidValue(majAllele,NucleotideAlignmentConstants.GAP_ALLELE)) return 
true; if (call == AlignmentUtils.getDiploidValue(NucleotideAlignmentConstants.GAP_ALLELE,majAllele)) return true; if (call == AlignmentUtils.getDiploidValue(minAllele,NucleotideAlignmentConstants.GAP_ALLELE)) return true; if (call == AlignmentUtils.getDiploidValue(NucleotideAlignmentConstants.GAP_ALLELE,minAllele)) return true; if (call == NucleotideAlignmentConstants.GAP_DIPLOID_ALLELE) return true; return false; } private DataOutputStream openLocusLog(String outHapMap) { String logFileName; if (outHapMap.endsWith(".hmp.txt")) { logFileName = outHapMap.replace(".hmp.txt", ".LocusLog.txt"); } else if (outHapMap.endsWith(".hmp.txt.gz")) { logFileName = outHapMap.replace(".hmp.txt.gz", ".LocusLog.txt"); } else { logFileName = outHapMap + ".LocusLog.txt"; } try { DataOutputStream locusLogDOS = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(new File(logFileName)), 65536)); locusLogDOS.writeBytes( "chr\tstart\tend\tstrand\ttotalbp\tnTags\tnReads\tnTaxaCovered\tminTaxaCovered\tstatus\tnVariableSites\tposVariableSites\tnVarSitesKept\tposVarSitesKept\trefTag?\tmaxTagLen\tminTagLen\n"); return locusLogDOS; } catch (Exception e) { catchLocusLogException(e); } return null; } private void logRejectedTagLocus(TagsAtLocus currTAL, DataOutputStream locusLogDOS) { int start, end; if (currTAL.getStrand() == -1) { end = currTAL.getMinStartPosition(); start = currTAL.getMinStartPosition()-currTAL.getMaxTagLength()+1; } else { start = currTAL.getMinStartPosition(); end = currTAL.getMinStartPosition()+currTAL.getMaxTagLength()-1; } int totalbp = end-start+1; String status, refTag; if (currTAL.getSize() == 1) { status = "invariant\t0"; refTag = currTAL.getDivergenceOfTag(0)==0 ? "1" : "0"; } else { status = "tooFewTaxa\tNA"; boolean refTagFound = false; int t = -1; while (!refTagFound && t < currTAL.getSize()-1) { t++; if (currTAL.getDivergenceOfTag(t)==0) { refTagFound=true; } } refTag = refTagFound ? 
"1" : "0"; } try { locusLogDOS.writeBytes( currTAL.getChromosome() +"\t"+ start +"\t"+ end +"\t"+ currTAL.getStrand() +"\t"+ totalbp +"\t"+ currTAL.getSize() +"\t"+ currTAL.getTotalNReads() +"\t"+ currTAL.getNumberTaxaCovered() +"\t"+ minTaxaWithLocus +"\t"+ status +"\t"+ "NA" +"\t"+ "0" +"\t"+ "NA" +"\t"+ refTag +"\t"+ currTAL.getMaxTagLength() +"\t"+ currTAL.getMinTagLength() +"\n" ); } catch (Exception e) { catchLocusLogException(e); } } private void logAcceptedTagLocus(String locusLogRecord, DataOutputStream locusLogDOS) { try { locusLogDOS.writeBytes(locusLogRecord); } catch (Exception e) { catchLocusLogException(e); } } private void catchLocusLogException(Exception e) { System.out.println("ERROR: Unable to write to locus log file: " + e); e.printStackTrace(); System.exit(1); } private byte[] filterCallsForInbreds(byte[] calls) { byte[] callsForInbredsOnly = new byte[nInbredTaxa]; int inbred = 0; for (int taxon = 0; taxon < calls.length; taxon++) { if (useTaxaForMinF[taxon]) { callsForInbredsOnly[inbred] = calls[taxon]; inbred++; } } return callsForInbredsOnly; } public static HashMap<String, Double> readTaxaFsFromFile(File pedigreeFile) { HashMap<String, Double> taxaFs = new HashMap<String, Double>(); String inputLine = "Nothing has been read from the pedigree input file yet"; int nameCol = -1, fCol = -1, nTaxa = 0; try { BufferedReader br = new BufferedReader(new FileReader(pedigreeFile), 65536); inputLine = br.readLine(); // header line String[] cells = inputLine.split("\t"); // headers for (int col = 0; col < cells.length; col++) { if (cells[col].equalsIgnoreCase("Name")) { nameCol = col; } if (cells[col].equalsIgnoreCase("F")) { fCol = col; } } if (nameCol > -1 && fCol > -1) { while ((inputLine = br.readLine()) != null) { cells = inputLine.split("\t"); if (cells[fCol].equals("NA")) { taxaFs.put(cells[nameCol], -2.0); } else { taxaFs.put(cells[nameCol], Double.parseDouble(cells[fCol])); } ++nTaxa; } } else { throw new Exception("Name and/or F column not found in header"); } } catch (Exception e) { myLogger.error("Catch in reading pedigree file e=" + e); e.printStackTrace(); System.out.println(inputLine); return null; } myLogger.info(nTaxa + " taxa read from the pedigree file"); return taxaFs; } private boolean maskNonInbredTaxa() { useTaxaForMinF = new boolean[theTBT.getTaxaCount()]; // initialized to false nInbredTaxa = 0; try { for (int taxon = 0; taxon < theTBT.getTaxaCount(); taxon++) { if (taxaFs.containsKey(theTBT.getTaxaName(taxon))) { if (taxaFs.get(theTBT.getTaxaName(taxon)) >= minF) { useTaxaForMinF[taxon] = true; nInbredTaxa++; } } else { throw new Exception("Taxon " + theTBT.getTaxaName(taxon) + " not found in the pedigree file"); } } myLogger.info(nInbredTaxa + " taxa with an Expected F >= the mnF of " + minF + " were found in the input TBT"); return true; } catch (Exception e) { myLogger.error("Mismatch between TBT and pedigree file e=" + e); e.printStackTrace(); return false; } } private long[] readReferenceGenomeChr(String inFileStr, int targetChr) { int nBases = getLengthOfReferenceGenomeChr(inFileStr, targetChr); if (nBases == 0) return null; int basesPerLong = BaseEncoder.chunkSize; int nLongs = (nBases % basesPerLong == 0) ? 
nBases / basesPerLong : (nBases / basesPerLong) + 1; long[] refGenomeChrAsLongs = new long[nLongs]; myLogger.info("\n\nReading in the target chromosome " + targetChr + " from the reference genome fasta file: " + inFileStr); String temp = "Nothing has been read yet from the reference genome fasta file"; try { BufferedReader br = new BufferedReader(new FileReader(new File(inFileStr))); StringBuilder currStrB = new StringBuilder(); int currChr = Integer.MIN_VALUE, chunk = 0; while (br.ready()) { temp = br.readLine().trim(); if (temp.startsWith(">")) { if (chunk > 0) { break; // finished reading the targetChr (no need to read the rest of the file) } String chrS = temp.replace(">", ""); chrS = chrS.replace("chr", ""); currChr = Integer.parseInt(chrS); // don't need to catch exception because getLengthOfReferenceGenomeChr() would have caught it already myLogger.info("Currently reading chromosome " + currChr + " (target chromosome = " + targetChr + ")"); } else if (currChr == targetChr) { currStrB.append(temp.replace("N", "A")); // BaseEncoder encodes sequences with N's as (long) -1 while (currStrB.length() >= basesPerLong) { refGenomeChrAsLongs[chunk] = BaseEncoder.getLongFromSeq(currStrB.substring(0, basesPerLong)); currStrB = (currStrB.length() > basesPerLong) ? new StringBuilder(currStrB.substring(basesPerLong)) : new StringBuilder(); chunk++; if (chunk % 1000000 == 0) { myLogger.info(chunk + " chunks of " + basesPerLong + " bases read from the reference genome fasta file for chromosome " + targetChr); } } } } if (currStrB.length() > 0) { refGenomeChrAsLongs[chunk] = BaseEncoder.getLongFromSeq(currStrB.toString()); chunk++; } myLogger.info("\n\nFinished reading target chromosome " + targetChr + " into a total of " + chunk + " " + basesPerLong + "bp chunks\n\n"); if (chunk != nLongs) { throw new Exception("The number of 32 base chunks read (" + chunk + ") was not equal to the expected number (" + nLongs + ")"); } br.close(); } catch (Exception e) { myLogger.error("Exception caught while reading the reference genome fasta file at line. Error=" + e); e.printStackTrace(); System.exit(1); } return refGenomeChrAsLongs; } private int getLengthOfReferenceGenomeChr(String inFileStr, int targetChr) { myLogger.info("\n\nDetermining the length (in bases) of target chromosome " + targetChr + " in the reference genome fasta file: " + inFileStr); String temp = "Nothing has been read yet from the reference genome fasta file"; int line = 0, nBases = 0; try { BufferedReader br = new BufferedReader(new FileReader(new File(inFileStr))); int currChr = Integer.MIN_VALUE; while (br.ready()) { temp = br.readLine().trim(); line++; if (line % 1000000 == 0) { myLogger.info(line + " lines read from the reference genome fasta file"); } if (temp.startsWith(">")) { if (nBases > 0) { break; // finished reading the targetChr (no need to read the rest of the file) } String chrS = temp.replace(">", ""); chrS = chrS.replace("chr", ""); try { currChr = Integer.parseInt(chrS); } catch (NumberFormatException e) { myLogger.error("\n\nTagsToSNPByAlignment detected a non-numeric chromosome name in the reference genome sequence fasta file: " + chrS + "\n\nPlease change the FASTA headers in your reference genome sequence to integers " + "(>1, >2, >3, etc.) 
OR to 'chr' followed by an integer (>chr1, >chr2, >chr3, etc.)\n\n"); System.exit(1); } myLogger.info("Currently reading chromosome " + currChr + " (target chromosome = " + targetChr + ")"); } else if (currChr == targetChr) { nBases += temp.length(); } } if (nBases == 0) { throw new Exception("Target chromosome ("+targetChr+") not found"); } myLogger.info("The target chromosome " + targetChr + " is " + nBases + " bases long"); br.close(); } catch (Exception e) { if (nBases == 0) { myLogger.warn("Exception caught while reading the reference genome fasta file at line " + line + "\n e=" + e +"\n Skipping this chromosome..."); } else { myLogger.error("Exception caught while reading the reference genome fasta file at line " + line + "\n e=" + e); e.printStackTrace(); System.exit(1); } } return nBases; } private String getRefSeqInRegion(TagsAtLocus theTAL) { int basesPerLong = BaseEncoder.chunkSize; int refSeqStartPosition = theTAL.getMinStartPosition() - 128; int startIndex = Math.max((refSeqStartPosition / basesPerLong) - 1, 0); int refSeqEndPosition = theTAL.getMaxStartPosition() + 128; int endIndex = Math.min((refSeqEndPosition / basesPerLong) + 1, refGenomeChr.length - 1); StringBuilder sb = new StringBuilder(); for (int i = startIndex; i <= endIndex; ++i) { sb.append(BaseEncoder.getSequenceFromLong(refGenomeChr[i])); } theTAL.setMinStartPosition(startIndex * basesPerLong + 1); return sb.toString(); } private void addRefTag(TagsAtLocus theTAL) { String refTag; int basesPerLong = BaseEncoder.chunkSize; int refSeqStartPos, refSeqEndPos; if (theTAL.getStrand() == -1) { refSeqEndPos = theTAL.getMinStartPosition(); refSeqStartPos = refSeqEndPos - theTAL.getMaxTagLength() + 1; } else { refSeqStartPos = theTAL.getMinStartPosition(); refSeqEndPos = refSeqStartPos + theTAL.getMaxTagLength() - 1; } int startIndex = Math.max((refSeqStartPos/basesPerLong)-1, 0); int endIndex = Math.min((refSeqEndPos/basesPerLong), refGenomeChr.length-1); StringBuilder sb = new StringBuilder(); for (int i = startIndex; i <= endIndex; ++i) { sb.append(BaseEncoder.getSequenceFromLong(refGenomeChr[i])); } refTag = sb.substring(Math.max(refSeqStartPos-startIndex*basesPerLong-1,0), Math.min(refSeqStartPos-startIndex*basesPerLong-1+theTAL.getMaxTagLength(),sb.length())); if (theTAL.getStrand() == -1) { refTag = revComplement(refTag); } theTAL.addRefTag(refTag, theTOPM.getTagSizeInLong(), theTOPM.getNullTag()); } public static byte complementGeno(byte geno) { byte comp = Byte.MIN_VALUE; switch (geno) { case 0x00: comp = 0x33; break; // AA -> TT case 0x01: comp = 0x32; break; // AC -> TG case 0x02: comp = 0x31; break; // AG -> TC case 0x03: comp = 0x30; break; // AT -> TA case 0x11: comp = 0x22; break; // CC -> GG case 0x10: comp = 0x23; break; // CA -> GT case 0x12: comp = 0x21; break; // CG -> GC case 0x13: comp = 0x20; break; // CT -> GA case 0x22: comp = 0x11; break; // GG -> CC case 0x20: comp = 0x13; break; // GA -> CT case 0x21: comp = 0x12; break; // GC -> CG case 0x23: comp = 0x10; break; // GT -> CA case 0x33: comp = 0x00; break; // TT -> AA case 0x30: comp = 0x03; break; // TA -> AT case 0x31: comp = 0x02; break; // TC -> AG case 0x32: comp = 0x01; break; // TG -> AC case 0x05: comp = 0x35; break; // A- -> T- case 0x50: comp = 0x53; break; // -A -> -T case 0x15: comp = 0x25; break; // C- -> G- case 0x51: comp = 0x52; break; // -C -> -G case 0x25: comp = 0x15; break; // G- -> C- case 0x52: comp = 0x51; break; // -G -> -C case 0x35: comp = 0x05; break; // T- -> A- case 0x53: comp = 0x50; break; // -T -> -A case 0x55: 
comp = 0x55; break; // -- -> -- case Alignment.UNKNOWN_DIPLOID_ALLELE: comp = Alignment.UNKNOWN_DIPLOID_ALLELE; break; default: comp = Alignment.UNKNOWN_DIPLOID_ALLELE; break; } return comp; } public static byte complementAllele(byte allele) { byte comp = Byte.MIN_VALUE; switch (allele) { case 0x00: comp=NucleotideAlignmentConstants.T_ALLELE; break; // A -> T case 0x01: comp=NucleotideAlignmentConstants.G_ALLELE; break; // C -> G case 0x02: comp=NucleotideAlignmentConstants.C_ALLELE; break; // G -> C case 0x03: comp=NucleotideAlignmentConstants.A_ALLELE; break; // T -> A case 0x05: comp=NucleotideAlignmentConstants.GAP_ALLELE; break; // - -> - default: comp = Alignment.UNKNOWN_ALLELE; break; } return comp; } public static byte getIUPACAllele(byte allele) { byte iupacAllele = (byte) 'N'; switch (allele) { case 0x00: iupacAllele = (byte) 'A'; break; case 0x01: iupacAllele = (byte) 'C'; break; case 0x02: iupacAllele = (byte) 'G'; break; case 0x03: iupacAllele = (byte) 'T'; break; case 0x05: iupacAllele = (byte) '-'; break; default: iupacAllele = (byte) 'N'; break; } return iupacAllele; } public static char complement(char geno) { char comp = 'X'; switch (geno) { case 'A': comp = 'T'; break; case 'C': comp = 'G'; break; case 'G': comp = 'C'; break; case 'T': comp = 'A'; break; case 'K': comp = 'M'; break; case 'M': comp = 'K'; break; case 'R': comp = 'Y'; break; case 'S': comp = 'S'; break; case 'W': comp = 'W'; break; case 'Y': comp = 'R'; break; case '-': comp = '-'; break; // both strands have the deletion case '+': comp = '+'; break; // both strands have the insertion case '0': comp = '0'; break; case 'N': comp = 'N'; break; default: comp = 'N'; break; } return comp; } public static String revComplement(String seq) { StringBuilder sb = new StringBuilder(); for (int i = seq.length()-1; i >= 0; i--) { sb.append(complement(seq.charAt(i))); } return sb.toString(); } /** * Resolves the appropriate IUPACNucleotide from the given callPair * (currCall, newCall) * * CurrCall is any valid IUPACNucleotide (except '+') while newCall is * restricted to A,C,G,T,-,N * * @param currCall, the current genotypic call from previous tag(s) at the * locus * @param newCall, the new allele from the current tag to be combined with * currCall to make a new genotype * @return resolved byte (valid IUPACNucleotide) */ public static byte resolveSNPByteFromCallPair(byte currCall, byte newCall) { byte snpByte; if (newCall == 'A') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'A'; break; case 'C': snpByte = 'M'; break; case 'G': snpByte = 'R'; break; case 'T': snpByte = 'W'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'M'; break; case 'R': snpByte = 'R'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'W'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'C') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'M'; break; case 'C': snpByte = 'C'; break; case 'G': snpByte = 'S'; break; case 'T': snpByte = 'Y'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'M'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'S'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'Y'; break; case '-': snpByte = '0'; 
break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'G') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'R'; break; case 'C': snpByte = 'S'; break; case 'G': snpByte = 'G'; break; case 'T': snpByte = 'K'; break; case 'K': snpByte = 'K'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'R'; break; case 'S': snpByte = 'S'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'T') { switch (currCall) { // conflicts (more than 2 alleles) get set to N case 'A': snpByte = 'W'; break; case 'C': snpByte = 'Y'; break; case 'G': snpByte = 'K'; break; case 'T': snpByte = 'T'; break; case 'K': snpByte = 'K'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'W'; break; case 'Y': snpByte = 'Y'; break; case '-': snpByte = '0'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == '-') { // conflicts (more than 2 alleles) get set to N switch (currCall) { case 'A': snpByte = '0'; break; case 'C': snpByte = '0'; break; case 'G': snpByte = '0'; break; case 'T': snpByte = '0'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '-'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else if (newCall == 'N') { switch (currCall) { case 'A': snpByte = 'A'; break; case 'C': snpByte = 'C'; break; case 'G': snpByte = 'G'; break; case 'T': snpByte = 'T'; break; case 'K': snpByte = 'N'; break; case 'M': snpByte = 'N'; break; case 'R': snpByte = 'N'; break; case 'S': snpByte = 'N'; break; case 'W': snpByte = 'N'; break; case 'Y': snpByte = 'N'; break; case '-': snpByte = '-'; break; case '+': snpByte = 'N'; break; // it should not be possible for currCall to be '+' case '0': snpByte = '0'; break; case 'N': snpByte = 'N'; break; // was set to N because of a previous conflict, so should stay as N default: snpByte = 'N'; break; } } else { snpByte = 'N'; } return snpByte; } // Calculate QS and PL for VCF might not be in the correct class public static int[] calcScore (int a, int b) { int[] results= new int[4]; int n = a + b; int m = a; if (b > m) { m = b; } double fact = 0; if (n > m) { for (int i = n; i > m; i--) { fact += Math.log10(i); } for (int i = 1; i <= (n - m); i++){ fact -= Math.log10(i); } } double aad = Math.pow(10, fact + (double)a * v1 + (double)b * v2); double abd = Math.pow(10, fact + (double)n * v3); double bbd = Math.pow(10, fact + (double)b * v1 + 
(double)a * v2); double md = aad; if (md < abd) { md = abd; } if (md < bbd) { md = bbd; } int gq = 0; if ((aad + abd + bbd) > 0) { gq = (int)(md / (aad + abd + bbd) * 100); } int aa =(int) (-10 * (fact + (double)a * v1 + (double)b * v2)); int ab =(int) (-10 * (fact + (double)n * v3)); int bb =(int) (-10 * (fact + (double)b * v1 + (double)a * v2)); m = aa; if (m > ab) { m = ab; } if (m>bb) { m = bb; } aa -= m; ab -= m; bb -= m; results[0] = aa > 255 ? 255 : aa; results[1] = ab > 255 ? 255 : ab; results[2] = bb > 255 ? 255 : bb; results[3] = gq; return results; } private void initVCFScoreMap() { error = 0.001; v1 = Math.log10(1.0 - error * 3.0 /4.0); v2 = Math.log10(error/4); v3 = Math.log10(0.5 - (error/4.0)); myGenoScoreMap = new HashMap(); for (int i = 0; i < 255; i++) { for (int j = 0; j < 255; j++) { myGenoScoreMap.put(Integer.toString(i) + Integer.toString(j), calcScore(i, j)); } } } public int[] getScore(String key) { return myGenoScoreMap.get(key); } } class CustomSNPLog { private final BufferedWriter myWriter; private final String HEADER = "Chr" +"\t"+ "TagLocusStartPos" +"\t"+ "TagLocusStrand" +"\t"+ "SNPPosition" +"\t"+ "Alleles" +"\t"+ "nTagsAtLocus" +"\t"+ "nReads" +"\t"+ "nTaxa" +"\t"+ "nTaxaCovered" +"\t"+ "nInbreds" +"\t"+ "nInbredsCovered" +"\t"+ "nInbreds1Read" +"\t"+ "nInbreds1ReadMaj" +"\t"+ "nInbreds1ReadMin" +"\t"+ "nInbredsGT1Read" +"\t"+ "nInbredsGT1ReadHomoMaj" +"\t"+ "nInbredsGT1ReadHomoMin" +"\t"+ "nInbredHets" +"\t"+ "inbredCoverage" +"\t"+ "inbredHetScore" +"\t"+ "nOutbreds" +"\t"+ "nOutbredsCovered" +"\t"+ "nOutbreds1Read" +"\t"+ "nOutbreds1ReadMaj" +"\t"+ "nOutbreds1ReadMin" +"\t"+ "nOutbredsGT1Read" +"\t"+ "nOutbredsGT1ReadHomoMaj" +"\t"+ "nOutbredsGT1ReadHomoMin" +"\t"+ "nOutbredHets" +"\t"+ "passed?" +"\n"; public CustomSNPLog(String outHapMapFile, boolean append) { String logFileName; if (outHapMapFile.endsWith(".hmp.txt")) { logFileName = outHapMapFile.replace(".hmp.txt", ".customSNPLog.txt"); } else if (outHapMapFile.endsWith(".hmp.txt.gz")) { logFileName = outHapMapFile.replace(".hmp.txt.gz", ".customSNPLog.txt"); } else { logFileName = outHapMapFile + ".customSNPLog.txt"; } if ((logFileName == null) || (logFileName.length() == 0)) { myWriter = null; } else { boolean exists = false; File file = new File(logFileName); if (file.exists()) { exists = true; } myWriter = Utils.getBufferedWriter(logFileName, append); if (!exists || !append) { try { myWriter.append(HEADER); } catch (Exception e) { e.printStackTrace(); } } } } public void writeEntry(String entry) { try { myWriter.append(entry); } catch (Exception e) { e.printStackTrace(); } } public void close() { try { myWriter.close(); } catch (Exception e) { // do nothing; } } } class CustomSNPLogRecord { private int chr; private int tagLocusStartPos; private byte tagLocusStrand; private int snpPosition; private byte majAllele; private byte minAllele; private String alleles; private int nTagsAtLocus; private int nReads; private int nTaxa; private int nTaxaCovered; private int nInbreds; private int nInbredsCovered; private int nInbreds1Read; private int nInbreds1ReadMaj; private int nInbreds1ReadMin; private int nInbredsGT1Read; private int nInbredsGT1ReadHomoMaj; private int nInbredsGT1ReadHomoMin; private int nInbredHets; private int nOutbreds; private int nOutbredsCovered; private int nOutbreds1Read; private int nOutbreds1ReadMaj; private int nOutbreds1ReadMin; private int nOutbredsGT1Read; private int nOutbredsGT1ReadMajHomo; private int nOutbredsGT1ReadMinHomo; private int nOutbredHets; private double 
inbredCoverage; private double inbredHetScore; private boolean pass; private static final String DELIM = "\t"; public CustomSNPLogRecord(int site, TagsAtLocus myTAL, int position, boolean[] isInbred, boolean includeReference) { chr = myTAL.getChromosome(); tagLocusStartPos = myTAL.getMinStartPosition(); tagLocusStrand = myTAL.getStrand(); snpPosition = position; byte[][] byteAlleles = myTAL.getCommonAlleles(); majAllele= tagLocusStrand==-1? TagsToSNPByAlignmentPlugin.complementAllele(byteAlleles[0][site]) : byteAlleles[0][site]; minAllele= tagLocusStrand==-1? TagsToSNPByAlignmentPlugin.complementAllele(byteAlleles[1][site]) : byteAlleles[1][site]; alleles = NucleotideAlignmentConstants.NUCLEOTIDE_ALLELES[0][majAllele] + "/" + NucleotideAlignmentConstants.NUCLEOTIDE_ALLELES[0][minAllele]; nTagsAtLocus = (includeReference) ? myTAL.getSize()-1 : myTAL.getSize(); nReads = myTAL.getTotalNReads(); nTaxaCovered = myTAL.getNumberTaxaCovered(); getCounts(site, myTAL.getAlleleDepthsInTaxa(), isInbred); } private void getCounts(int site, byte[][][] alleleDepthsInTaxa, boolean[] isInbred) { nTaxa = alleleDepthsInTaxa[0][site].length; int genoDepth, nAlleles; boolean majPresent; for (int tx = 0; tx < nTaxa; tx++) { genoDepth = 0; nAlleles = 0; majPresent = false; if (isInbred == null || isInbred[tx]) { // if no pedigree file was used, assume that all taxa are inbred ++nInbreds; for (int a = 0; a < 2; a++) { int alleleDepth = alleleDepthsInTaxa[a][site][tx]; if (alleleDepth > 0) { genoDepth += alleleDepth; ++nAlleles; if (a == 0) majPresent = true; } } if (nAlleles > 0) { ++nInbredsCovered; if (genoDepth > 1) { ++nInbredsGT1Read; if (nAlleles > 1) ++nInbredHets; else if (majPresent) ++nInbredsGT1ReadHomoMaj; else ++nInbredsGT1ReadHomoMin; } else { ++nInbreds1Read; if (majPresent) ++nInbreds1ReadMaj; else ++nInbreds1ReadMin; } } } else { ++nOutbreds; for (int a = 0; a < 2; a++) { int alleleDepth = alleleDepthsInTaxa[a][site][tx]; if (alleleDepth > 0) { genoDepth += alleleDepth; ++nAlleles; if (a == 0) majPresent = true; } } if (nAlleles > 0) { ++nOutbredsCovered; if (genoDepth > 1) { ++nOutbredsGT1Read; if (nAlleles > 1) ++nOutbredHets; else if (majPresent) ++nOutbredsGT1ReadMajHomo; else ++nOutbredsGT1ReadMinHomo; } else { ++nOutbreds1Read; if (majPresent) ++nOutbreds1ReadMaj; else ++nOutbreds1ReadMin; } } } } inbredCoverage = (double) nInbredsCovered/nInbreds; inbredHetScore = (double) nInbredHets/(nInbredsGT1ReadHomoMin + nInbredHets + 0.5); if (inbredCoverage > 0.15 && inbredHetScore < 0.21) pass = true; // machine learning cutoffs set by Ed } public boolean isGoodSNP() { return pass; } public double getInbredCoverage() { return inbredCoverage; } public double getInbredHetScore() { return inbredHetScore; } public String toString() { StringBuilder sBuilder = new StringBuilder(); sBuilder.append(String.valueOf(chr)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(tagLocusStartPos)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(tagLocusStrand)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(snpPosition)); sBuilder.append(DELIM); sBuilder.append(alleles); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTagsAtLocus)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nReads)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTaxa)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nTaxaCovered)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsCovered)); 
sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1ReadMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbreds1ReadMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1ReadHomoMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredsGT1ReadHomoMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nInbredHets)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(inbredCoverage)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(inbredHetScore)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsCovered)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1ReadMaj)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbreds1ReadMin)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1Read)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1ReadMajHomo)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredsGT1ReadMinHomo)); sBuilder.append(DELIM); sBuilder.append(String.valueOf(nOutbredHets)); sBuilder.append(DELIM); if (pass) { sBuilder.append(String.valueOf(1)); } else { sBuilder.append(String.valueOf(0)); } sBuilder.append("\n"); return sBuilder.toString(); } } class TagLocusSiteQualityScores { private int[] siteIndicesInTAL; private double[] inbredCoverage; private double[] inbredHetScore; private byte[][] alleles; // [nSites][nAlleles] private int[] position; private int currSize; public TagLocusSiteQualityScores(int nSites) { siteIndicesInTAL = new int[nSites]; inbredCoverage = new double[nSites]; inbredHetScore = new double[nSites]; alleles = new byte[nSites][]; position = new int[nSites]; currSize = 0; } public void addSite(int siteIndex, double inbredCov, double inbredHetS, byte[] alleles, int position) { siteIndicesInTAL[currSize] = siteIndex; inbredCoverage[currSize] = inbredCov; inbredHetScore[currSize] = inbredHetS; this.alleles[currSize] = alleles; this.position[currSize] = position; ++currSize; } public int getSize() { return currSize; } public int getSiteInTAL(int site) { return siteIndicesInTAL[site]; } public byte[] getAlleles(int site) { return alleles[site]; } public int getPosition(int site) { return position[site]; } public void sortByQuality() { Swapper swapperQual = new Swapper() { public void swap(int a, int b) { int tempInt; tempInt = siteIndicesInTAL[a]; siteIndicesInTAL[a] = siteIndicesInTAL[b]; siteIndicesInTAL[b] = tempInt; double score = inbredCoverage[a]; inbredCoverage[a] = inbredCoverage[b]; inbredCoverage[b] = score; score = inbredHetScore[a]; inbredHetScore[a] = inbredHetScore[b]; inbredHetScore[b] = score; byte[] tempAlleles = alleles[a]; alleles[a] = alleles[b]; alleles[b] = tempAlleles; tempInt = position[a]; position[a] = position[b]; position[b] = tempInt; } }; IntComparator compQual = new IntComparator() { public int compare(int a, int b) { // reverse sort (high inbredCoverage is good) if (inbredCoverage[a] > inbredCoverage[b]) { return -1; } if (inbredCoverage[a] < inbredCoverage[b]) { return 1; } // normal sort (low inbredHetScore is good) if (inbredHetScore[a] < inbredHetScore[b]) { return -1; } if (inbredHetScore[a] > inbredHetScore[b]) { return 1; } // normal sort (low site indices are better because closer 
to start of read) if (siteIndicesInTAL[a] < siteIndicesInTAL[b]) { return -1; } if (siteIndicesInTAL[a] > siteIndicesInTAL[b]) { return 1; } return 0; } }; GenericSorting.quickSort(0, currSize, compQual, swapperQual); } }
PRIVATE: turned off hardcoded customSNPFiltering (not appropriate for general users)
src/net/maizegenetics/gbs/pipeline/TagsToSNPByAlignmentPlugin.java
PRIVATE: turned off hardcoded customSNPFiltering (not appropriate for general users)
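For reference, the TagsToSNPByAlignmentPlugin record above filters candidate SNP sites by an inbreeding coefficient: calculateF() derives obsF = 1 - propHets/expHets from the heterozygote count and the major/minor allele counts (with expHets = 2*MAF*(1-MAF)), and isSiteGood() rejects the site when obsF < minF. Below is a minimal standalone sketch of that calculation; the class and method names are illustrative and are not part of TASSEL.

// Sketch of the per-site inbreeding-coefficient (F) test mirrored from calculateF()/isSiteGood().
public final class InbreedingFilterSketch {

    /**
     * @param majCount number of major-allele observations at the site
     * @param minCount number of minor-allele observations at the site
     * @param hetCount number of heterozygous genotype calls at the site
     * @return observed F = 1 - (observed het proportion / expected het proportion)
     */
    static double observedF(int majCount, int minCount, int hetCount) {
        double maf = (double) minCount / (majCount + minCount);
        int majHomo = (majCount - hetCount) / 2;          // homozygous major genotypes
        int minHomo = (minCount - hetCount) / 2;          // homozygous minor genotypes
        double propHets = (double) hetCount / (hetCount + majHomo + minHomo);
        double expHets = 2.0 * maf * (1.0 - maf);         // Hardy-Weinberg expectation
        return 1.0 - propHets / expHets;
    }

    public static void main(String[] args) {
        double f = observedF(80, 20, 2);                  // -> obsF = 0.875
        boolean keepSite = f >= 0.8;                      // compared against a minF-style threshold
        System.out.printf("obsF = %.3f, keep = %b%n", f, keepSite);
    }
}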
Java
mit
114f64e297e06d81a9d825bd4e3ba5da1c0f9316
0
FAU-Inf2/kwikshop-android,FAU-Inf2/kwikshop-android
package de.cs.fau.mad.quickshop.android; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.ActionBarDrawerToggle; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarActivity; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.FrameLayout; import android.widget.ListView; import android.widget.Toast; import java.util.ArrayList; import cs.fau.mad.quickshop_android.R; /** * BaseActivity: all activities have to inherit */ public class BaseActivity extends ActionBarActivity { public static FrameLayout frameLayout; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); frameLayout = (FrameLayout) findViewById(R.id.content_frame); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setHomeButtonEnabled(true); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.main, menu); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_settings: startActivity(new Intent(this, SettingActivity.class)); return true; case R.id.action_about: startActivity(new Intent(this, AboutActivity.class)); return true; case R.id.action_listofshoppinglists: startActivity(new Intent(this, ListOfShoppingListsActivity.class)); return true; default: return super.onOptionsItemSelected(item); } } }
app/src/main/java/de/cs/fau/mad/quickshop/android/BaseActivity.java
package de.cs.fau.mad.quickshop.android; import android.content.Intent; import android.os.Bundle; import android.support.v4.app.ActionBarDrawerToggle; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarActivity; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.FrameLayout; import android.widget.ListView; import android.widget.Toast; import java.util.ArrayList; import cs.fau.mad.quickshop_android.R; /** * BaseActivity: all activities have to inherit */ public class BaseActivity extends ActionBarActivity { public static FrameLayout frameLayout; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setHomeButtonEnabled(true); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.main, menu); return super.onCreateOptionsMenu(menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_settings: startActivity(new Intent(this, SettingActivity.class)); return true; case R.id.action_about: startActivity(new Intent(this, AboutActivity.class)); return true; case R.id.action_listofshoppinglists: startActivity(new Intent(this, ListOfShoppingListsActivity.class)); return true; default: return super.onOptionsItemSelected(item); } } }
Set frameLayout field in BaseActivity
app/src/main/java/de/cs/fau/mad/quickshop/android/BaseActivity.java
Set frameLayout field in BaseActivity
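The kwikshop-android record above adds the assignment frameLayout = (FrameLayout) findViewById(R.id.content_frame); to BaseActivity.onCreate(), exposing the shared content frame through the static field. A hypothetical consumer of that field is sketched below; ShoppingListActivity, its package placement next to BaseActivity, and the layout id are assumptions, not part of the record.

// Hypothetical subclass (same package as BaseActivity assumed) attaching its own view
// into the shared content_frame once BaseActivity has populated frameLayout.
import android.os.Bundle;
import cs.fau.mad.quickshop_android.R;

public class ShoppingListActivity extends BaseActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);               // BaseActivity assigns frameLayout here
        // R.layout.activity_shopping_list is an assumed resource, not taken from the record
        getLayoutInflater().inflate(R.layout.activity_shopping_list, BaseActivity.frameLayout, true);
    }
}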
Java
mit
304944830e48c0b0524fda885a2e0999bfdaf7d9
0
stdrone/junior
package ru.sfedu.mmcs.portfolio.swing.chart; import java.awt.BasicStroke; import java.awt.Color; import java.awt.event.MouseEvent; import java.awt.geom.Ellipse2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import org.apache.commons.math3.geometry.euclidean.twod.Vector2D; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartMouseEvent; import org.jfree.chart.ChartMouseListener; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.entity.XYItemEntity; import org.jfree.chart.event.PlotChangeEvent; import org.jfree.chart.event.PlotChangeListener; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.XYPlot; import org.jfree.chart.renderer.AbstractRenderer; import org.jfree.chart.renderer.xy.StandardXYItemRenderer; import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; import org.jfree.data.xy.DefaultXYDataset; import org.jfree.util.ShapeUtilities; import ru.sfedu.mmcs.portfolio.AnalyzerData; import ru.sfedu.mmcs.portfolio.Portfolio; import ru.sfedu.mmcs.portfolio.frontier.Frontier; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetActives; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetFrontier; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetOptimal; public class JFrontierChart extends ChartPanel { private static final long serialVersionUID = -4326820921442119966L; private JFrontierChart(JFreeChart chart) { super(chart); } private XYPlot _plot; private Frontier _frontier; public void refresh(AnalyzerData data) { if(data.getResult() != null) { _frontier = data.getResult(); DataSetFrontier frontierDataset = new DataSetFrontier(data.getResult().getFrontier()); DataSetActives portfolioDataset = new DataSetActives(data.getResult().getFrontier()); DataSetOptimal optimalDataset = new DataSetOptimal(data.getResult()); _plot.setDataset(frontierDataset); _plot.setDataset(1, portfolioDataset); _plot.setDataset(3, optimalDataset); _plot.getDomainAxis(0).setAutoRange(true); _plot.getRangeAxis(0).setAutoRange(true); } } public static JFrontierChart createFrontierChartPanel(String title, AnalyzerData data) { JFreeChart chart = ChartFactory.createXYLineChart(title, "μ", "V(μ)", null, PlotOrientation.VERTICAL, true, false, false); JFrontierChart chartPanel = new JFrontierChart(chart); chartPanel.setMouseWheelEnabled(true); chartPanel.setPopupMenu(chartPanel.createPopupMenu(false, true, true, true)); chartPanel._plot = chart.getXYPlot(); chartPanel._plot.addChangeListener(chartPanel.new FixAxisListner()); chartPanel.addChartMouseListener(chartPanel.new ChartClick()); NumberAxis axisActives = new NumberAxis("Активы"); axisActives.setRange(0.0, 1.0); axisActives.setAutoRange(false); chartPanel._plot.setRangeAxis(1,axisActives); chartPanel._plot.mapDatasetToRangeAxis(1,1); chartPanel._plot.mapDatasetToRangeAxis(2,0); chartPanel._plot.setRenderer(0, new StandardXYItemRenderer()); chartPanel._plot.setRenderer(1, new StandardXYItemRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(1)).setAutoPopulateSeriesStroke(false); chartPanel._plot.getRenderer(1).setBaseStroke(new BasicStroke(1.5f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 5.0f, new float[] {2.0f,1.0f,0.5f,1.0f}, 0.0f)); chartPanel._plot.setRenderer(2, new XYLineAndShapeRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(2)).setAutoPopulateSeriesShape(false); ((AbstractRenderer) chartPanel._plot.getRenderer(2)).setAutoPopulateSeriesPaint(false); 
chartPanel._plot.getRenderer(2).setBaseShape(ShapeUtilities.createDiagonalCross(3, 1)); chartPanel._plot.getRenderer(2).setBasePaint(Color.BLACK); chartPanel._plot.getRenderer(2).setBaseSeriesVisibleInLegend(false); chartPanel._plot.setRenderer(3, new XYLineAndShapeRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(3)).setAutoPopulateSeriesShape(false); chartPanel._plot.getRenderer(3).setBaseShape(new Ellipse2D.Double(-3, -3, 6, 6)); chartPanel.setHorizontalAxisTrace(true); chartPanel.refresh(data); return chartPanel; } private class FixAxisListner implements PlotChangeListener{ private boolean _changed = false; @Override public void plotChanged(PlotChangeEvent arg0) { if(!_changed) { _changed = true; ((XYPlot)arg0.getPlot()).getRangeAxis(1).setRange(0.0,1.0); } _changed = false; } }; private class ChartClick implements ChartMouseListener{ private Portfolio _data; @Override public void chartMouseClicked(ChartMouseEvent e) { if(e.getTrigger().getButton() == MouseEvent.BUTTON1) { if(_data != null) JFrontierChart.this.Events.fireEntityClick(new EventEntityClick(JFrontierChart.this, _data)); else if(JFrontierChart.this._frontier != null) { Point2D p = JFrontierChart.this.translateScreenToJava2D(e.getTrigger().getPoint()); Rectangle2D plotArea = JFrontierChart.this.getScreenDataArea(); XYPlot plot = JFrontierChart.this._plot; double chartX = plot.getDomainAxis().java2DToValue(p.getX(), plotArea, plot.getDomainAxisEdge()); //double chartY = plot.getRangeAxis().java2DToValue(p.getY(), plotArea, plot.getRangeAxisEdge()); JFrontierChart.this.Events.fireEntityClick( new EventEntityClick(JFrontierChart.this, JFrontierChart.this._frontier.calcPortfolio(new Vector2D(chartX, 0))) ); } } } @Override public void chartMouseMoved(ChartMouseEvent e) { DefaultXYDataset dataPoint = new DefaultXYDataset(); _data = null; if(e.getEntity() instanceof XYItemEntity) { XYItemEntity ce = (XYItemEntity) e.getEntity(); if(ce.getDataset() instanceof DataSetOptimal) { double x = (double) ce.getDataset().getX(ce.getSeriesIndex(), ce.getItem()); double y = (double) ce.getDataset().getY(ce.getSeriesIndex(), ce.getItem()); if(ce.getDataset() instanceof DataSetOptimal) _data = ((DataSetOptimal)ce.getDataset()).getPortfolio(ce.getSeriesIndex(), ce.getItem()); dataPoint.addSeries("", new double[][] {{x},{y}}); } } JFrontierChart.this._plot.setDataset(2, dataPoint); } } public final EntityClickSource Events = new EntityClickSource(); }
src/ru/sfedu/mmcs/portfolio/swing/chart/JFrontierChart.java
package ru.sfedu.mmcs.portfolio.swing.chart; import java.awt.BasicStroke; import java.awt.Color; import java.awt.geom.Ellipse2D; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import org.apache.commons.math3.geometry.euclidean.twod.Vector2D; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartMouseEvent; import org.jfree.chart.ChartMouseListener; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.entity.XYItemEntity; import org.jfree.chart.event.PlotChangeEvent; import org.jfree.chart.event.PlotChangeListener; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.XYPlot; import org.jfree.chart.renderer.AbstractRenderer; import org.jfree.chart.renderer.xy.StandardXYItemRenderer; import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; import org.jfree.data.xy.DefaultXYDataset; import org.jfree.util.ShapeUtilities; import ru.sfedu.mmcs.portfolio.AnalyzerData; import ru.sfedu.mmcs.portfolio.Portfolio; import ru.sfedu.mmcs.portfolio.frontier.Frontier; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetActives; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetFrontier; import ru.sfedu.mmcs.portfolio.swing.chart.data.DataSetOptimal; public class JFrontierChart extends ChartPanel { private static final long serialVersionUID = -4326820921442119966L; private JFrontierChart(JFreeChart chart) { super(chart); } private XYPlot _plot; private Frontier _frontier; public void refresh(AnalyzerData data) { if(data.getResult() != null) { _frontier = data.getResult(); DataSetFrontier frontierDataset = new DataSetFrontier(data.getResult().getFrontier()); DataSetActives portfolioDataset = new DataSetActives(data.getResult().getFrontier()); DataSetOptimal optimalDataset = new DataSetOptimal(data.getResult()); _plot.setDataset(frontierDataset); _plot.setDataset(1, portfolioDataset); _plot.setDataset(3, optimalDataset); _plot.getDomainAxis(0).setAutoRange(true); _plot.getRangeAxis(0).setAutoRange(true); } } public static JFrontierChart createFrontierChartPanel(String title, AnalyzerData data) { JFreeChart chart = ChartFactory.createXYLineChart(title, "μ", "V(μ)", null, PlotOrientation.VERTICAL, true, false, false); JFrontierChart chartPanel = new JFrontierChart(chart); chartPanel.setMouseWheelEnabled(true); chartPanel.setPopupMenu(chartPanel.createPopupMenu(false, true, true, true)); chartPanel._plot = chart.getXYPlot(); chartPanel._plot.addChangeListener(chartPanel.new FixAxisListner()); chartPanel.addChartMouseListener(chartPanel.new ChartClick()); NumberAxis axisActives = new NumberAxis("Активы"); axisActives.setRange(0.0, 1.0); axisActives.setAutoRange(false); chartPanel._plot.setRangeAxis(1,axisActives); chartPanel._plot.mapDatasetToRangeAxis(1,1); chartPanel._plot.mapDatasetToRangeAxis(2,0); chartPanel._plot.setRenderer(0, new StandardXYItemRenderer()); chartPanel._plot.setRenderer(1, new StandardXYItemRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(1)).setAutoPopulateSeriesStroke(false); chartPanel._plot.getRenderer(1).setBaseStroke(new BasicStroke(1.5f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 5.0f, new float[] {2.0f,1.0f,0.5f,1.0f}, 0.0f)); chartPanel._plot.setRenderer(2, new XYLineAndShapeRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(2)).setAutoPopulateSeriesShape(false); ((AbstractRenderer) chartPanel._plot.getRenderer(2)).setAutoPopulateSeriesPaint(false); 
chartPanel._plot.getRenderer(2).setBaseShape(ShapeUtilities.createDiagonalCross(3, 1)); chartPanel._plot.getRenderer(2).setBasePaint(Color.BLACK); chartPanel._plot.getRenderer(2).setBaseSeriesVisibleInLegend(false); chartPanel._plot.setRenderer(3, new XYLineAndShapeRenderer()); ((AbstractRenderer) chartPanel._plot.getRenderer(3)).setAutoPopulateSeriesShape(false); chartPanel._plot.getRenderer(3).setBaseShape(new Ellipse2D.Double(-3, -3, 6, 6)); chartPanel.setHorizontalAxisTrace(true); chartPanel.refresh(data); return chartPanel; } private class FixAxisListner implements PlotChangeListener{ private boolean _changed = false; @Override public void plotChanged(PlotChangeEvent arg0) { if(!_changed) { _changed = true; ((XYPlot)arg0.getPlot()).getRangeAxis(1).setRange(0.0,1.0); } _changed = false; } }; private class ChartClick implements ChartMouseListener{ private Portfolio _data; @Override public void chartMouseClicked(ChartMouseEvent e) { if(_data != null) JFrontierChart.this.Events.fireEntityClick(new EventEntityClick(JFrontierChart.this, _data)); else if(JFrontierChart.this._frontier != null) { Point2D p = JFrontierChart.this.translateScreenToJava2D(e.getTrigger().getPoint()); Rectangle2D plotArea = JFrontierChart.this.getScreenDataArea(); XYPlot plot = JFrontierChart.this._plot; double chartX = plot.getDomainAxis().java2DToValue(p.getX(), plotArea, plot.getDomainAxisEdge()); //double chartY = plot.getRangeAxis().java2DToValue(p.getY(), plotArea, plot.getRangeAxisEdge()); JFrontierChart.this.Events.fireEntityClick( new EventEntityClick(JFrontierChart.this, JFrontierChart.this._frontier.calcPortfolio(new Vector2D(chartX, 0))) ); } } @Override public void chartMouseMoved(ChartMouseEvent e) { DefaultXYDataset dataPoint = new DefaultXYDataset(); _data = null; if(e.getEntity() instanceof XYItemEntity) { XYItemEntity ce = (XYItemEntity) e.getEntity(); if(ce.getDataset() instanceof DataSetOptimal) { double x = (double) ce.getDataset().getX(ce.getSeriesIndex(), ce.getItem()); double y = (double) ce.getDataset().getY(ce.getSeriesIndex(), ce.getItem()); if(ce.getDataset() instanceof DataSetOptimal) _data = ((DataSetOptimal)ce.getDataset()).getPortfolio(ce.getSeriesIndex(), ce.getItem()); dataPoint.addSeries("", new double[][] {{x},{y}}); } } JFrontierChart.this._plot.setDataset(2, dataPoint); } } public final EntityClickSource Events = new EntityClickSource(); }
right click fix
src/ru/sfedu/mmcs/portfolio/swing/chart/JFrontierChart.java
right click fix
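The stdrone/junior record above ("right click fix") wraps the chartMouseClicked() body in a check that the triggering button is MouseEvent.BUTTON1, so right/middle clicks no longer fire the portfolio-selection event. A minimal sketch of that guard in isolation follows; only the BUTTON1 check mirrors the record, the listener body is illustrative.

// Left-click-only guard inside a JFreeChart ChartMouseListener.
import java.awt.event.MouseEvent;

import org.jfree.chart.ChartMouseEvent;
import org.jfree.chart.ChartMouseListener;

public class LeftClickOnlyListener implements ChartMouseListener {

    @Override
    public void chartMouseClicked(ChartMouseEvent e) {
        if (e.getTrigger().getButton() != MouseEvent.BUTTON1) {
            return;                                       // ignore right/middle clicks (e.g. popup menu)
        }
        System.out.println("left click at " + e.getTrigger().getPoint());
    }

    @Override
    public void chartMouseMoved(ChartMouseEvent e) {
        // no-op
    }
}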
Java
mit
7eb9425261363b66a23a3716204bbc8e159b0eda
0
trentech/StackBan
package com.gmail.trentech.stackban; import java.util.Optional; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockTypes; import org.spongepowered.api.data.DataContainer; import org.spongepowered.api.data.DataQuery; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.Order; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.filter.Getter; import org.spongepowered.api.event.filter.cause.Root; import org.spongepowered.api.event.item.inventory.ChangeInventoryEvent; import org.spongepowered.api.event.item.inventory.ClickInventoryEvent; import org.spongepowered.api.event.item.inventory.DropItemEvent; import org.spongepowered.api.event.item.inventory.UseItemStackEvent; import org.spongepowered.api.event.world.LoadWorldEvent; import org.spongepowered.api.item.ItemTypes; import org.spongepowered.api.item.inventory.Inventory; import org.spongepowered.api.item.inventory.InventoryArchetypes; import org.spongepowered.api.item.inventory.ItemStack; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.item.inventory.Slot; import org.spongepowered.api.item.inventory.crafting.CraftingOutput; import org.spongepowered.api.item.inventory.entity.PlayerInventory; import org.spongepowered.api.item.inventory.query.QueryOperationTypes; import org.spongepowered.api.item.inventory.transaction.SlotTransaction; import org.spongepowered.api.scheduler.Task; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.format.TextColors; import org.spongepowered.api.text.format.TextStyles; import org.spongepowered.api.text.serializer.TextSerializers; import org.spongepowered.api.world.World; import com.gmail.trentech.pjc.core.ConfigManager; import com.gmail.trentech.stackban.init.Action; import com.gmail.trentech.stackban.init.Common; import ninja.leaping.configurate.ConfigurationNode; public class EventListener { private static ConcurrentHashMap<UUID, Action> notifications = new ConcurrentHashMap<>(); @Listener public void onLoadWorldEvent(LoadWorldEvent event) { String worldName = event.getTargetWorld().getName(); Common.initConfig(worldName); } @Listener public void onChangeBlockEvent(ChangeBlockEvent.Place event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getFinal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.PLACE)) { log(player, itemStack, Action.PLACE); event.setCancelled(true); } } } @Listener public void onChangeBlockEvent(ChangeBlockEvent.Modify event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getFinal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.MODIFY)) { log(player, 
itemStack, Action.MODIFY); event.setCancelled(true); } } } @Listener public void onChangeBlockEvent(ChangeBlockEvent.Break event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getOriginal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.BREAK)) { log(player, itemStack, Action.BREAK); event.setCancelled(true); } } } @Listener public void onClickInventoryEvent(ClickInventoryEvent event, @Root Player player, @Getter("getTargetInventory") Inventory inventory) { if (player.hasPermission("stackban.admin")) { return; } if(!inventory.getArchetype().equals(InventoryArchetypes.PLAYER) && !inventory.getArchetype().equals(InventoryArchetypes.WORKBENCH)) { return; } CraftingOutput output = inventory.query(QueryOperationTypes.INVENTORY_TYPE.of(CraftingOutput.class)); for(Inventory s : output.slots()) { Slot slot = (Slot) s; Optional<ItemStack> optionalItem = slot.peek(); if(optionalItem.isPresent()) { if (isBanned(player, optionalItem.get(), Action.CRAFT)) { log(player, optionalItem.get(), Action.CRAFT); event.setCancelled(true); } } } } // @Listener(order = Order.POST) // public void onAffectSlotEvent(AffectSlotEvent event, @Root Player player) { // if (player.hasPermission("stackban.admin")) { // return; // } // // for(SlotTransaction transaction : event.getTransactions()) { // Slot slot = transaction.getSlot(); // Optional<ItemStack> optionalItem = slot.peek(); // // if(optionalItem.isPresent()) { // if (isBanned(player, optionalItem.get(), Action.CRAFT)) { // log(player, optionalItem.get(), Action.CRAFT); // // transaction.setValid(false); // event.setCancelled(true); // } // } // } // } @Listener public void onChangeInventoryEvent(ChangeInventoryEvent.Held event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (SlotTransaction transaction : event.getTransactions()) { ItemStack itemStack = transaction.getFinal().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, itemStack, Action.HOLD)) { PlayerInventory inv = player.getInventory().query(QueryOperationTypes.INVENTORY_TYPE.of(PlayerInventory.class)); for (Inventory item : inv.getHotbar().slots()) { Slot slot = (Slot) item; Optional<ItemStack> peek = slot.peek(); if(!peek.isPresent()) { continue; } if (isBanned(player, peek.get(), Action.HOLD)) { slot.clear(); } } for (Inventory item : inv.getMain().slots()) { Slot slot = (Slot) item; Optional<ItemStack> peek = slot.peek(); if(!peek.isPresent()) { continue; } if (isBanned(player, peek.get(), Action.HOLD)) { slot.clear(); } } log(player, itemStack, Action.HOLD); return; } } } @Listener(order = Order.POST) public void onChangeInventoryEvent(ChangeInventoryEvent.Pickup event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (SlotTransaction transaction : event.getTransactions()) { ItemStack itemStack = transaction.getFinal().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, itemStack, Action.PICKUP)) { log(player, itemStack, Action.PICKUP); event.setCancelled(true); return; } } } @Listener public void onDropItemEvent(DropItemEvent.Pre event, @Root Player player) { for (ItemStackSnapshot snapshot : 
event.getDroppedItems()) { ItemStack itemStack = snapshot.createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, itemStack, Action.DROP)) { log(player, itemStack, Action.DROP); event.setCancelled(true); } } } @Listener public void onUseItemStackEvent(UseItemStackEvent.Start event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } ItemStack itemStack = event.getItemStackInUse().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { return; } if (isBanned(player, itemStack, Action.USE)) { log(player, itemStack, Action.USE); event.setCancelled(true); } } private boolean isBanned(Player player, ItemStack itemStack, Action action) { World world = player.getWorld(); String itemType = itemStack.getType().getId(); DataContainer container = itemStack.toContainer(); DataQuery query = DataQuery.of('/', "UnsafeDamage"); if (player.hasPermission("stackban.bypass." + itemType + ":" + container.get(query).get().toString()) || player.hasPermission("stackban.bypass." + itemType)) { return false; } ConfigurationNode config = ConfigManager.get(Main.getPlugin(), world.getName()).getConfig(); if (!config.getNode("items", itemType + ":" + container.get(query).get().toString()).isVirtual()) { return !config.getNode("items", itemType + ":" + container.get(query).get().toString(), action.getName()).getBoolean(); } if (!config.getNode("items", itemType).isVirtual()) { return !config.getNode("items", itemType, action.getName()).getBoolean(); } config = ConfigManager.get(Main.getPlugin(), "global").getConfig(); if (!config.getNode("items", itemType + ":" + container.get(query).get().toString()).isVirtual()) { return !config.getNode("items", itemType + ":" + container.get(query).get().toString(), action.getName()).getBoolean(); } if (!config.getNode("items", itemType).isVirtual()) { return !config.getNode("items", itemType, action.getName()).getBoolean(); } return false; } public static void log(Player player, ItemStack itemStack, Action action) { UUID uuid = player.getUniqueId(); if(!notifications.containsKey(uuid)) { notifications.put(uuid, action); ConfigurationNode config = ConfigManager.get(Main.getPlugin()).getConfig(); player.sendMessage(TextSerializers.FORMATTING_CODE.deserialize(config.getNode("player_message").getString().replaceAll("%ITEM%", itemStack.getTranslation().get()).replaceAll("%ACTION%", action.getName()))); if (config.getNode("console_log").getBoolean()) { String itemType = itemStack.getType().getId(); DataContainer container = itemStack.toContainer(); DataQuery query = DataQuery.of('/', "UnsafeDamage"); int unsafeDamage = Integer.parseInt(container.get(query).get().toString()); String message = config.getNode("log_message").getString().replaceAll("%PLAYER%", player.getName()).replaceAll("%ACTION%", action.getName()); if (unsafeDamage != 0) { message = message.replaceAll("%ITEM%", itemType + ":" + container.get(query).get().toString()); } else { message = message.replaceAll("%ITEM%", itemType); } Main.instance().getLog().info(message); for (Player p : Sponge.getServer().getOnlinePlayers()) { if (p.hasPermission("stackban.log")) { p.sendMessage(Text.of(TextStyles.ITALIC, TextColors.GRAY, message)); } } } Task.builder().delayTicks(40).execute(c -> { notifications.remove(uuid);}).submit(Main.getPlugin()); } } }
src/main/java/com/gmail/trentech/stackban/EventListener.java
package com.gmail.trentech.stackban; import java.util.Optional; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockTypes; import org.spongepowered.api.data.DataContainer; import org.spongepowered.api.data.DataQuery; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.Order; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.filter.Getter; import org.spongepowered.api.event.filter.cause.Root; import org.spongepowered.api.event.item.inventory.ChangeInventoryEvent; import org.spongepowered.api.event.item.inventory.ClickInventoryEvent; import org.spongepowered.api.event.item.inventory.DropItemEvent; import org.spongepowered.api.event.item.inventory.UseItemStackEvent; import org.spongepowered.api.event.world.LoadWorldEvent; import org.spongepowered.api.item.ItemTypes; import org.spongepowered.api.item.inventory.Inventory; import org.spongepowered.api.item.inventory.InventoryArchetypes; import org.spongepowered.api.item.inventory.ItemStack; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.item.inventory.Slot; import org.spongepowered.api.item.inventory.crafting.CraftingOutput; import org.spongepowered.api.item.inventory.entity.PlayerInventory; import org.spongepowered.api.item.inventory.transaction.SlotTransaction; import org.spongepowered.api.scheduler.Task; import org.spongepowered.api.text.Text; import org.spongepowered.api.text.format.TextColors; import org.spongepowered.api.text.format.TextStyles; import org.spongepowered.api.text.serializer.TextSerializers; import org.spongepowered.api.world.World; import com.gmail.trentech.pjc.core.ConfigManager; import com.gmail.trentech.stackban.init.Action; import com.gmail.trentech.stackban.init.Common; import ninja.leaping.configurate.ConfigurationNode; public class EventListener { private static ConcurrentHashMap<UUID, Action> notifications = new ConcurrentHashMap<>(); @Listener public void onLoadWorldEvent(LoadWorldEvent event) { String worldName = event.getTargetWorld().getName(); Common.initConfig(worldName); } @Listener public void onChangeBlockEvent(ChangeBlockEvent.Place event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getFinal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.PLACE)) { log(player, itemStack, Action.PLACE); event.setCancelled(true); } } } @Listener public void onChangeBlockEvent(ChangeBlockEvent.Modify event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getFinal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.MODIFY)) { log(player, itemStack, Action.MODIFY); event.setCancelled(true); } } } @Listener 
public void onChangeBlockEvent(ChangeBlockEvent.Break event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (Transaction<BlockSnapshot> transaction : event.getTransactions()) { BlockSnapshot snapshot = transaction.getOriginal(); if (snapshot.getState().getType().equals(BlockTypes.AIR)) { continue; } ItemStack itemStack; try { itemStack = ItemStack.builder().fromBlockSnapshot(snapshot).build(); } catch (Exception e) { return; } if (isBanned(player, itemStack, Action.BREAK)) { log(player, itemStack, Action.BREAK); event.setCancelled(true); } } } @Listener public void onClickInventoryEvent(ClickInventoryEvent event, @Root Player player, @Getter("getTargetInventory") Inventory inventory) { if (player.hasPermission("stackban.admin")) { return; } if(!inventory.getArchetype().equals(InventoryArchetypes.PLAYER) && !inventory.getArchetype().equals(InventoryArchetypes.WORKBENCH)) { return; } CraftingOutput output = inventory.query(CraftingOutput.class); for(Inventory s : output.slots()) { Slot slot = (Slot) s; Optional<ItemStack> optionalItem = slot.peek(); if(optionalItem.isPresent()) { if (isBanned(player, optionalItem.get(), Action.CRAFT)) { log(player, optionalItem.get(), Action.CRAFT); event.setCancelled(true); } } } } // @Listener(order = Order.POST) // public void onAffectSlotEvent(AffectSlotEvent event, @Root Player player) { // if (player.hasPermission("stackban.admin")) { // return; // } // // for(SlotTransaction transaction : event.getTransactions()) { // Slot slot = transaction.getSlot(); // Optional<ItemStack> optionalItem = slot.peek(); // // if(optionalItem.isPresent()) { // if (isBanned(player, optionalItem.get(), Action.CRAFT)) { // log(player, optionalItem.get(), Action.CRAFT); // // transaction.setValid(false); // event.setCancelled(true); // } // } // } // } @Listener public void onChangeInventoryEvent(ChangeInventoryEvent.Held event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (SlotTransaction transaction : event.getTransactions()) { ItemStack itemStack = transaction.getFinal().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, itemStack, Action.HOLD)) { PlayerInventory inv = player.getInventory().query(PlayerInventory.class); for (Inventory item : inv.getHotbar().slots()) { Slot slot = (Slot) item; Optional<ItemStack> peek = slot.peek(); if(!peek.isPresent()) { continue; } if (isBanned(player, peek.get(), Action.HOLD)) { slot.clear(); } } for (Inventory item : inv.getMain().slots()) { Slot slot = (Slot) item; Optional<ItemStack> peek = slot.peek(); if(!peek.isPresent()) { continue; } if (isBanned(player, peek.get(), Action.HOLD)) { slot.clear(); } } log(player, itemStack, Action.HOLD); return; } } } @Listener(order = Order.POST) public void onChangeInventoryEvent(ChangeInventoryEvent.Pickup event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } for (SlotTransaction transaction : event.getTransactions()) { ItemStack itemStack = transaction.getFinal().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, itemStack, Action.PICKUP)) { log(player, itemStack, Action.PICKUP); event.setCancelled(true); return; } } } @Listener public void onDropItemEvent(DropItemEvent.Pre event, @Root Player player) { for (ItemStackSnapshot snapshot : event.getDroppedItems()) { ItemStack itemStack = snapshot.createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { continue; } if (isBanned(player, 
itemStack, Action.DROP)) { log(player, itemStack, Action.DROP); event.setCancelled(true); } } } @Listener public void onUseItemStackEvent(UseItemStackEvent.Start event, @Root Player player) { if (player.hasPermission("stackban.admin")) { return; } ItemStack itemStack = event.getItemStackInUse().createStack(); if (itemStack.getType().equals(ItemTypes.NONE)) { return; } if (isBanned(player, itemStack, Action.USE)) { log(player, itemStack, Action.USE); event.setCancelled(true); } } private boolean isBanned(Player player, ItemStack itemStack, Action action) { World world = player.getWorld(); String itemType = itemStack.getType().getId(); DataContainer container = itemStack.toContainer(); DataQuery query = DataQuery.of('/', "UnsafeDamage"); if (player.hasPermission("stackban.bypass." + itemType + ":" + container.get(query).get().toString()) || player.hasPermission("stackban.bypass." + itemType)) { return false; } ConfigurationNode config = ConfigManager.get(Main.getPlugin(), world.getName()).getConfig(); if (!config.getNode("items", itemType + ":" + container.get(query).get().toString()).isVirtual()) { return !config.getNode("items", itemType + ":" + container.get(query).get().toString(), action.getName()).getBoolean(); } if (!config.getNode("items", itemType).isVirtual()) { return !config.getNode("items", itemType, action.getName()).getBoolean(); } config = ConfigManager.get(Main.getPlugin(), "global").getConfig(); if (!config.getNode("items", itemType + ":" + container.get(query).get().toString()).isVirtual()) { return !config.getNode("items", itemType + ":" + container.get(query).get().toString(), action.getName()).getBoolean(); } if (!config.getNode("items", itemType).isVirtual()) { return !config.getNode("items", itemType, action.getName()).getBoolean(); } return false; } public static void log(Player player, ItemStack itemStack, Action action) { UUID uuid = player.getUniqueId(); if(!notifications.containsKey(uuid)) { notifications.put(uuid, action); ConfigurationNode config = ConfigManager.get(Main.getPlugin()).getConfig(); player.sendMessage(TextSerializers.FORMATTING_CODE.deserialize(config.getNode("player_message").getString().replaceAll("%ITEM%", itemStack.getTranslation().get()).replaceAll("%ACTION%", action.getName()))); if (config.getNode("console_log").getBoolean()) { String itemType = itemStack.getType().getId(); DataContainer container = itemStack.toContainer(); DataQuery query = DataQuery.of('/', "UnsafeDamage"); int unsafeDamage = Integer.parseInt(container.get(query).get().toString()); String message = config.getNode("log_message").getString().replaceAll("%PLAYER%", player.getName()).replaceAll("%ACTION%", action.getName()); if (unsafeDamage != 0) { message = message.replaceAll("%ITEM%", itemType + ":" + container.get(query).get().toString()); } else { message = message.replaceAll("%ITEM%", itemType); } Main.instance().getLog().info(message); for (Player p : Sponge.getServer().getOnlinePlayers()) { if (p.hasPermission("stackban.log")) { p.sendMessage(Text.of(TextStyles.ITALIC, TextColors.GRAY, message)); } } } Task.builder().delayTicks(40).execute(c -> { notifications.remove(uuid);}).submit(Main.getPlugin()); } } }
Update deprecated stuff - untested
src/main/java/com/gmail/trentech/stackban/EventListener.java
Update deprecated stuff - untested
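Every handler in the EventListener row above funnels into the same decision: players with the admin or a matching stackban.bypass permission are exempt, otherwise the item is looked up in the per-world configuration first and in the global configuration as a fallback. A condensed sketch of that lookup order, with the Sponge player and Configurate types replaced by plain collections (class and parameter names here are illustrative stand-ins, not part of the plugin):

import java.util.Map;
import java.util.Set;

public class BanLookupSketch {

    // Per-world ban lists keyed by world name; "global" is the fallback list.
    private final Map<String, Set<String>> bannedItemsByWorld;
    // Permission nodes the acting player holds.
    private final Set<String> playerPermissions;

    BanLookupSketch(Map<String, Set<String>> bannedItemsByWorld, Set<String> playerPermissions) {
        this.bannedItemsByWorld = bannedItemsByWorld;
        this.playerPermissions = playerPermissions;
    }

    boolean isBanned(String world, String itemId) {
        // 1. a bypass permission exempts the player outright
        if (playerPermissions.contains("stackban.bypass." + itemId)) {
            return false;
        }
        // 2. the per-world configuration is consulted first ...
        if (bannedItemsByWorld.getOrDefault(world, Set.of()).contains(itemId)) {
            return true;
        }
        // 3. ... then the global configuration as a fallback
        return bannedItemsByWorld.getOrDefault("global", Set.of()).contains(itemId);
    }

    public static void main(String[] args) {
        BanLookupSketch sketch = new BanLookupSketch(
                Map.of("world", Set.of("minecraft:tnt"), "global", Set.of("minecraft:bedrock")),
                Set.of());
        System.out.println(sketch.isBanned("world", "minecraft:tnt"));      // true (world list)
        System.out.println(sketch.isBanned("nether", "minecraft:bedrock")); // true (global fallback)
    }
}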
Java
mit
b69959ef14d7adb35453e6ab7590661859ebb18a
0
Sevenflanks/linebot-operator-next
package next.operator.linebot.talker; import com.linecorp.bot.client.LineMessagingClient; import com.linecorp.bot.model.event.MessageEvent; import com.linecorp.bot.model.event.message.TextMessageContent; import next.operator.currency.enums.CurrencyType; import next.operator.currency.model.CurrencyExrateModel; import next.operator.currency.service.CurrencyService; import next.operator.linebot.executor.impl.ExrateExecutor; import next.operator.linebot.service.RespondentService; import next.operator.linebot.service.RespondentTalkable; import next.operator.utils.NumberUtils; import org.ansj.domain.Term; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.math.BigDecimal; import java.util.List; import java.util.Optional; import java.util.function.Consumer; /** * 當對話中出現錢幣關鍵字時提供匯率資料 */ @Service public class ExrateTalker implements RespondentTalkable { @Autowired private ExrateExecutor exrateExecutor; @Autowired private CurrencyService currencyService; private ThreadLocal<Term> currentMached = new ThreadLocal<>(); private ThreadLocal<Double> currentAmount = new ThreadLocal<>(); @Override public boolean isReadable(String message) { // 檢查是否有存在符合幣別的單字 final List<Term> terms = RespondentService.currentTern.get(); final Optional<Term> matchedTerm = terms.stream() .filter(t -> { final Optional<CurrencyType> currencyType = CurrencyType.tryParseByName(t.getName()); return currencyType.filter(type -> CurrencyType.TWD != type).isPresent(); }) .findFirst(); final boolean matched = matchedTerm.isPresent(); if (matched) { // 檢查是否存在數字 final double sum = terms.stream() .filter(t -> "m".equals(t.getNatureStr()) || "nw".equals(t.getNatureStr())) .mapToDouble(t -> Optional.ofNullable(NumberUtils.tryDouble(t.getName())).orElseGet(() -> Optional.ofNullable(NumberUtils.zhNumConvertToInt(t.getName())).orElse(1D))) .sum(); currentAmount.set(sum > 0 ? sum : 1); currentMached.set(matchedTerm.get()); } else { currentMached.remove(); currentAmount.remove(); } return matched; } @Override public Consumer<MessageEvent<TextMessageContent>> doFirst(LineMessagingClient client) { return null; } @Override public String talk(String message) { final Term term = currentMached.get(); final Double amount = Optional.ofNullable(currentAmount.get()).orElse(1D); currentMached.remove(); currentAmount.remove(); final CurrencyType matchedCurrenctType = CurrencyType.tryParseByName(term.getName()).get(); // isReadable已經檢查過,必定有值 final CurrencyExrateModel exrate = currencyService.getExrate(matchedCurrenctType.name(), CurrencyType.TWD.name()); return "我感覺到了你想知道" + matchedCurrenctType.getFirstLocalName() + "的匯率!\n" + exrateExecutor.decimalFormat.format(BigDecimal.valueOf(amount)) + " " + exrate.getExFrom() + " = " + exrateExecutor.decimalFormat.format(exrate.getExrate().multiply(BigDecimal.valueOf(amount))) + " " + exrate.getExTo() + ", 資料時間:" + exrateExecutor.dateTimeFormatter.format(exrate.getTime()); } }
src/main/java/next/operator/linebot/talker/ExrateTalker.java
package next.operator.linebot.talker; import com.linecorp.bot.client.LineMessagingClient; import com.linecorp.bot.model.event.MessageEvent; import com.linecorp.bot.model.event.message.TextMessageContent; import next.operator.currency.enums.CurrencyType; import next.operator.currency.model.CurrencyExrateModel; import next.operator.currency.service.CurrencyService; import next.operator.linebot.executor.impl.ExrateExecutor; import next.operator.linebot.service.RespondentService; import next.operator.linebot.service.RespondentTalkable; import next.operator.utils.NumberUtils; import org.ansj.domain.Term; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.math.BigDecimal; import java.util.List; import java.util.Optional; import java.util.function.Consumer; /** * 當對話中出現錢幣關鍵字時提供匯率資料 */ @Service public class ExrateTalker implements RespondentTalkable { @Autowired private ExrateExecutor exrateExecutor; @Autowired private CurrencyService currencyService; private ThreadLocal<Term> currentMached = new ThreadLocal<>(); private ThreadLocal<Double> currentAmount = new ThreadLocal<>(); @Override public boolean isReadable(String message) { // 檢查是否有存在符合幣別的單字 final List<Term> terms = RespondentService.currentTern.get(); final Optional<Term> matchedTerm = terms.stream() .filter(t -> { final Optional<CurrencyType> currencyType = CurrencyType.tryParseByName(t.getName()); return currencyType.filter(type -> CurrencyType.TWD != type).isPresent(); }) .findFirst(); final boolean matched = matchedTerm.isPresent(); if (matched) { // 檢查是否存在數字 final double sum = terms.stream() .filter(t -> "m".equals(t.getNatureStr()) || "nw".equals(t.getNatureStr())) .mapToDouble(t -> Optional.ofNullable(NumberUtils.tryDouble(t.getName())).orElseGet(() -> Optional.ofNullable(NumberUtils.zhNumConvertToInt(t.getName())).orElse(1D))) .sum(); currentAmount.set(sum); currentMached.set(matchedTerm.get()); } else { currentMached.remove(); currentAmount.remove(); } return matched; } @Override public Consumer<MessageEvent<TextMessageContent>> doFirst(LineMessagingClient client) { return null; } @Override public String talk(String message) { final Term term = currentMached.get(); final Double amount = Optional.ofNullable(currentAmount.get()).orElse(1D); currentMached.remove(); currentAmount.remove(); final CurrencyType matchedCurrenctType = CurrencyType.tryParseByName(term.getName()).get(); // isReadable已經檢查過,必定有值 final CurrencyExrateModel exrate = currencyService.getExrate(matchedCurrenctType.name(), CurrencyType.TWD.name()); return "我感覺到了你想知道" + matchedCurrenctType.getFirstLocalName() + "的匯率!\n" + exrateExecutor.decimalFormat.format(BigDecimal.valueOf(amount)) + " " + exrate.getExFrom() + " = " + exrateExecutor.decimalFormat.format(exrate.getExrate().multiply(BigDecimal.valueOf(amount))) + " " + exrate.getExTo() + ", 資料時間:" + exrateExecutor.dateTimeFormatter.format(exrate.getTime()); } }
Avoid the guessed number sum being 0
src/main/java/next/operator/linebot/talker/ExrateTalker.java
Avoid the guessed number sum being 0
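The only functional change between the two ExrateTalker versions above is the guard on the summed amount: the old code stored the raw sum, which is 0 when the sentence mentions a currency but no usable number, while the new code falls back to 1, matching the commit message. A minimal self-contained sketch of that guard, with the ansj Term stream reduced to plain string tokens (names here are illustrative, not the bot's API):

import java.util.List;

public class AmountGuardSketch {

    // Sums the numeric tokens of a parsed sentence; falls back to 1 when the
    // sum is not positive, so a currency mention without a number means "1 unit".
    static double guessAmount(List<String> numericTokens) {
        double sum = numericTokens.stream()
                .mapToDouble(t -> {
                    try {
                        return Double.parseDouble(t);
                    } catch (NumberFormatException e) {
                        return 0D; // an unparsable token contributes nothing
                    }
                })
                .sum();
        return sum > 0 ? sum : 1D;   // the fix: never report an amount of 0
    }

    public static void main(String[] args) {
        System.out.println(guessAmount(List.of("100")));  // 100.0
        System.out.println(guessAmount(List.of("abc")));  // 1.0 (fallback)
        System.out.println(guessAmount(List.of()));       // 1.0 (fallback)
    }
}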
Java
mit
4ac3fa41f8233c9536d655d6e923110d8ac857e4
0
venkatramanm/swf-all,venkatramanm/swf-all,venkatramanm/swf-all
package com.venky.swf.integration.api; import com.venky.core.collections.IgnoreCaseMap; import com.venky.core.io.ByteArrayInputStream; import com.venky.core.string.StringUtil; import com.venky.core.util.Bucket; import com.venky.swf.db.Database; import com.venky.swf.db.annotations.column.ui.mimes.MimeType; import com.venky.swf.integration.FormatHelper; import com.venky.swf.routing.Config; import com.venky.xml.XMLDocument; import org.apache.commons.io.input.ReaderInputStream; import org.json.simple.JSONAware; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import java.io.*; import java.lang.reflect.ParameterizedType; import java.net.*; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; public class Call<T> { HttpMethod method = HttpMethod.GET; InputFormat inputFormat = InputFormat.FORM_FIELDS; String url ; Map<String, String> requestHeaders = new HashMap<>(); public Map<String, List<String>> getResponseHeaders() { return responseHeaders; } Map<String, List<String>> responseHeaders = new IgnoreCaseMap<>(); ByteArrayInputStream responseStream = null; ByteArrayInputStream errorStream = null; T input; private void checkExpired(){ if (responseStream != null){ throw new RuntimeException("Call already used once. Create another instance of Call Object"); } } public Call<T> method(HttpMethod method){ checkExpired(); this.method = method; return this; } public Call<T> inputFormat(InputFormat format){ checkExpired(); this.inputFormat = format; return this; } public Call<T> url(String url){ checkExpired(); this.url = url; return this; } public Call<T> url(String baseUrl, String relativeUrl){ checkExpired(); StringBuilder sUrl = new StringBuilder(); if (baseUrl.endsWith("/")) { sUrl.append(baseUrl.substring(0, baseUrl.length()-1)); }else { sUrl.append(baseUrl); } if (relativeUrl.startsWith("/")){ sUrl.append(relativeUrl); }else { sUrl.append("/").append(relativeUrl); } return url(sUrl.toString()); } public Call<T> headers(Map<String,String> requestHeaders){ checkExpired(); requestHeaders.forEach((k,v)->{ this.requestHeaders.put(k,v); }); return this; } public Call<T> header(String key, String value){ checkExpired(); this.requestHeaders.put(key,value); return this; } public Call<T> input(T input){ checkExpired(); this.input = input; return this; } public Call(){ } int timeOut = 60000; public Call<T> timeOut(int timeOut){ checkExpired(); this.timeOut = timeOut; return this; } private boolean beingRedirected = false; private boolean isBeingRedirected(){ if (responseStream == null){ invoke(); } return beingRedirected; } private String redirectedUrl = null; public String getRedirectedUrl(){ if (responseStream == null){ invoke(); } return redirectedUrl; } private Call<T> invoke(){ checkExpired(); if (method == HttpMethod.GET && inputFormat != InputFormat.FORM_FIELDS) { throw new RuntimeException("Cannot call API using Method " + method + " and parameter as " + inputFormat ); } URL curl; HttpURLConnection connection = null; StringBuilder fakeCurlRequest = new StringBuilder(); try { StringBuilder sUrl = new StringBuilder(); sUrl.append(url); String parameterString = inputFormat == InputFormat.JSON ? getParametersAsJSONString(input) : (inputFormat == InputFormat.XML ? getParametersAsXMLString(input): (inputFormat == InputFormat.FORM_FIELDS ? 
getParametersAsFormFields(input) : "")); if (method == HttpMethod.GET && parameterString.length() > 0) { if (sUrl.lastIndexOf("?") < 0) { sUrl.append("?"); }else { sUrl.append("&"); } sUrl.append(parameterString); } fakeCurlRequest.append("Request ").append(":\n curl "); curl = new URL(sUrl.toString()); connection = (HttpURLConnection)(curl.openConnection()); connection.setConnectTimeout(timeOut); connection.setReadTimeout(timeOut); connection.setRequestMethod(method.toString()); connection.setRequestProperty("Accept-Encoding", "gzip"); for (String k : requestHeaders.keySet()) { String v = requestHeaders.get(k); connection.setRequestProperty(k, v); fakeCurlRequest.append(" -H '").append(k).append(": ").append(v).append("' "); }; connection.setDoOutput(true); connection.setDoInput(true); fakeCurlRequest.append("'").append(sUrl).append("'"); fakeCurlRequest.append(" "); if (method != HttpMethod.GET) { byte[] parameterByteArray = inputFormat == InputFormat.INPUT_STREAM ? getParameterRaw(input) : parameterString.getBytes(); if (inputFormat == InputFormat.INPUT_STREAM){ fakeCurlRequest.append("-d '").append("**Raw binary Stream**").append("'"); }else { fakeCurlRequest.append("-d '").append(parameterString).append("'"); } connection.getOutputStream().write(parameterByteArray); } if (connection.getResponseCode() >= 200 && connection.getResponseCode() < 299 ) { //2xx is success.!! InputStream in = null; if (connection.getContentEncoding()!=null && connection.getContentEncoding().equals("gzip")) { in = new GZIPInputStream(connection.getInputStream()); }else { in = connection.getInputStream(); } responseHeaders.putAll(connection.getHeaderFields()); responseStream = new ByteArrayInputStream(StringUtil.readBytes(in)); errorStream= new ByteArrayInputStream(new byte[]{}); this.hasErrors = false; }else if (connection.getResponseCode() == HttpURLConnection.HTTP_MOVED_TEMP || connection.getResponseCode() == HttpURLConnection.HTTP_MOVED_PERM ){ redirectedUrl = connection.getHeaderField("Location"); beingRedirected = true; responseStream = new ByteArrayInputStream(new byte[]{}); errorStream = new ByteArrayInputStream(new byte[]{}); hasErrors = false; }else { errorStream = new ByteArrayInputStream(StringUtil.readBytes(connection.getErrorStream())); responseStream = new ByteArrayInputStream(new byte[] {}); this.hasErrors = true; } if (responseStream.available()> 0){ fakeCurlRequest.append("\n Response:\n"); String contentType = responseHeaders.get("content-type").isEmpty() ? MimeType.TEXT_PLAIN.toString() : responseHeaders.get("content-type").get(0); if (contentType.equals(MimeType.APPLICATION_JSON.toString()) || contentType.contains(MimeType.APPLICATION_XML.toString()) || contentType.startsWith("text")){ fakeCurlRequest.append(StringUtil.read(responseStream,true)); }else { fakeCurlRequest.append("**Raw binary Stream**"); } }else if (errorStream.available() >0){ fakeCurlRequest.append("\n Error:\n"); fakeCurlRequest.append(StringUtil.read(errorStream,true)); } Config.instance().getLogger(getClass().getName()).info(fakeCurlRequest.toString()); return this; } catch (IOException e) { throw new RuntimeException(e); //Soften the exception. 
}finally { if (connection != null) { connection.disconnect(); } } } boolean hasErrors = false; public boolean hasErrors(){ if (responseStream == null){ invoke(); } return this.hasErrors ; } @SuppressWarnings("unchecked") public <J extends JSONAware> J getResponseAsJson(){ return (J)JSONValue.parse(new InputStreamReader(getResponseStream())); } public XMLDocument getResponseAsXML(){ return XMLDocument.getDocumentFor(getResponseStream()); } public InputStream getResponseStream() { if (responseStream == null){ invoke(); } return responseStream; } public InputStream getErrorStream() { if (responseStream == null){ invoke(); } return errorStream; } public String getError(){ return Database.getJdbcTypeHelper("").getTypeRef(InputStream.class).getTypeConverter().toString(getErrorStream()); } @SuppressWarnings("unchecked") private String getParametersAsFormFields(Object p) { if (p == null) { return "" ; } Map parameters = (Map)p; StringBuilder q = new StringBuilder(); Bucket pCount = new Bucket(); parameters.forEach((k,v)->{ if (pCount.intValue() > 0) { q.append("&"); } String key = (String)k; String value; try { if (v instanceof List){ StringBuilder csv = new StringBuilder(); ((List) v).forEach(entry->{ if (csv.length() > 0){ csv.append(","); } csv.append(entry); }); value = URLEncoder.encode(csv.toString(), "utf-8"); }else { value = URLEncoder.encode(String.valueOf(v), "utf-8"); } } catch (UnsupportedEncodingException e) { value = String.valueOf(v); } if (q.indexOf(key+"=") < 0) { q.append(key).append("=").append(value); } pCount.increment(); }); return q.toString(); } private String getParametersAsJSONString(Object p) { return ((JSONAware)p).toString(); } private String getParametersAsXMLString(Object p) { return ((XMLDocument)p).toString(); } private byte[] getParameterRaw(Object p){ if (p instanceof InputStream){ return StringUtil.readBytes((InputStream)p); }else if (p instanceof Reader) { return StringUtil.readBytes(new ReaderInputStream((Reader)p)); }else if (p instanceof byte[]){ return (byte[])p; }else { throw new RuntimeException("unknown raw parameter" + p.getClass()); } } }
swf-db/src/main/java/com/venky/swf/integration/api/Call.java
package com.venky.swf.integration.api; import com.venky.core.collections.IgnoreCaseMap; import com.venky.core.io.ByteArrayInputStream; import com.venky.core.string.StringUtil; import com.venky.core.util.Bucket; import com.venky.swf.db.Database; import com.venky.swf.db.annotations.column.ui.mimes.MimeType; import com.venky.swf.integration.FormatHelper; import com.venky.swf.routing.Config; import com.venky.xml.XMLDocument; import org.apache.commons.io.input.ReaderInputStream; import org.json.simple.JSONAware; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import java.io.*; import java.lang.reflect.ParameterizedType; import java.net.*; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; public class Call<T> { HttpMethod method = HttpMethod.GET; InputFormat inputFormat = InputFormat.FORM_FIELDS; String url ; Map<String, String> requestHeaders = new HashMap<>(); public Map<String, List<String>> getResponseHeaders() { return responseHeaders; } Map<String, List<String>> responseHeaders = new IgnoreCaseMap<>(); ByteArrayInputStream responseStream = null; ByteArrayInputStream errorStream = null; T input; private void checkExpired(){ if (responseStream != null){ throw new RuntimeException("Call already used once. Create another instance of Call Object"); } } public Call<T> method(HttpMethod method){ checkExpired(); this.method = method; return this; } public Call<T> inputFormat(InputFormat format){ checkExpired(); this.inputFormat = format; return this; } public Call<T> url(String url){ checkExpired(); this.url = url; return this; } public Call<T> url(String baseUrl, String relativeUrl){ checkExpired(); StringBuilder sUrl = new StringBuilder(); if (baseUrl.endsWith("/")) { sUrl.append(baseUrl.substring(0, baseUrl.length()-1)); }else { sUrl.append(baseUrl); } if (relativeUrl.startsWith("/")){ sUrl.append(relativeUrl); }else { sUrl.append("/").append(relativeUrl); } return url(sUrl.toString()); } public Call<T> headers(Map<String,String> requestHeaders){ checkExpired(); requestHeaders.forEach((k,v)->{ this.requestHeaders.put(k,v); }); return this; } public Call<T> header(String key, String value){ checkExpired(); this.requestHeaders.put(key,value); return this; } public Call<T> input(T input){ checkExpired(); this.input = input; return this; } public Call(){ } int timeOut = 60000; public Call<T> timeOut(int timeOut){ checkExpired(); this.timeOut = timeOut; return this; } private boolean beingRedirected = false; private boolean isBeingRedirected(){ return beingRedirected; } private String redirectedUrl = null; public String getRedirectedUrl(){ return redirectedUrl; } private Call<T> invoke(){ checkExpired(); if (method == HttpMethod.GET && inputFormat != InputFormat.FORM_FIELDS) { throw new RuntimeException("Cannot call API using Method " + method + " and parameter as " + inputFormat ); } URL curl; HttpURLConnection connection = null; StringBuilder fakeCurlRequest = new StringBuilder(); try { StringBuilder sUrl = new StringBuilder(); sUrl.append(url); String parameterString = inputFormat == InputFormat.JSON ? getParametersAsJSONString(input) : (inputFormat == InputFormat.XML ? getParametersAsXMLString(input): (inputFormat == InputFormat.FORM_FIELDS ? 
getParametersAsFormFields(input) : "")); if (method == HttpMethod.GET && parameterString.length() > 0) { if (sUrl.lastIndexOf("?") < 0) { sUrl.append("?"); }else { sUrl.append("&"); } sUrl.append(parameterString); } fakeCurlRequest.append("Request ").append(":\n curl "); curl = new URL(sUrl.toString()); connection = (HttpURLConnection)(curl.openConnection()); connection.setConnectTimeout(timeOut); connection.setReadTimeout(timeOut); connection.setRequestMethod(method.toString()); connection.setRequestProperty("Accept-Encoding", "gzip"); for (String k : requestHeaders.keySet()) { String v = requestHeaders.get(k); connection.setRequestProperty(k, v); fakeCurlRequest.append(" -H '").append(k).append(": ").append(v).append("' "); }; connection.setDoOutput(true); connection.setDoInput(true); fakeCurlRequest.append("'").append(sUrl).append("'"); fakeCurlRequest.append(" "); if (method != HttpMethod.GET) { byte[] parameterByteArray = inputFormat == InputFormat.INPUT_STREAM ? getParameterRaw(input) : parameterString.getBytes(); if (inputFormat == InputFormat.INPUT_STREAM){ fakeCurlRequest.append("-d '").append("**Raw binary Stream**").append("'"); }else { fakeCurlRequest.append("-d '").append(parameterString).append("'"); } connection.getOutputStream().write(parameterByteArray); } if (connection.getResponseCode() >= 200 && connection.getResponseCode() < 299 ) { //2xx is success.!! InputStream in = null; if (connection.getContentEncoding()!=null && connection.getContentEncoding().equals("gzip")) { in = new GZIPInputStream(connection.getInputStream()); }else { in = connection.getInputStream(); } responseHeaders.putAll(connection.getHeaderFields()); responseStream = new ByteArrayInputStream(StringUtil.readBytes(in)); errorStream= new ByteArrayInputStream(new byte[]{}); this.hasErrors = false; }else if (connection.getResponseCode() == HttpURLConnection.HTTP_MOVED_TEMP || connection.getResponseCode() == HttpURLConnection.HTTP_MOVED_PERM ){ redirectedUrl = connection.getHeaderField("Location"); beingRedirected = true; responseStream = new ByteArrayInputStream(new byte[]{}); errorStream = new ByteArrayInputStream(new byte[]{}); hasErrors = false; }else { errorStream = new ByteArrayInputStream(StringUtil.readBytes(connection.getErrorStream())); responseStream = new ByteArrayInputStream(new byte[] {}); this.hasErrors = true; } if (responseStream.available()> 0){ fakeCurlRequest.append("\n Response:\n"); String contentType = responseHeaders.get("content-type").isEmpty() ? MimeType.TEXT_PLAIN.toString() : responseHeaders.get("content-type").get(0); if (contentType.equals(MimeType.APPLICATION_JSON.toString()) || contentType.contains(MimeType.APPLICATION_XML.toString()) || contentType.startsWith("text")){ fakeCurlRequest.append(StringUtil.read(responseStream,true)); }else { fakeCurlRequest.append("**Raw binary Stream**"); } }else if (errorStream.available() >0){ fakeCurlRequest.append("\n Error:\n"); fakeCurlRequest.append(StringUtil.read(errorStream,true)); } Config.instance().getLogger(getClass().getName()).info(fakeCurlRequest.toString()); return this; } catch (IOException e) { throw new RuntimeException(e); //Soften the exception. 
}finally { if (connection != null) { connection.disconnect(); } } } boolean hasErrors = false; public boolean hasErrors(){ if (responseStream == null){ invoke(); } return this.hasErrors ; } @SuppressWarnings("unchecked") public <J extends JSONAware> J getResponseAsJson(){ return (J)JSONValue.parse(new InputStreamReader(getResponseStream())); } public XMLDocument getResponseAsXML(){ return XMLDocument.getDocumentFor(getResponseStream()); } public InputStream getResponseStream() { if (responseStream == null){ invoke(); } return responseStream; } public InputStream getErrorStream() { if (responseStream == null){ invoke(); } return errorStream; } public String getError(){ return Database.getJdbcTypeHelper("").getTypeRef(InputStream.class).getTypeConverter().toString(getErrorStream()); } @SuppressWarnings("unchecked") private String getParametersAsFormFields(Object p) { if (p == null) { return "" ; } Map parameters = (Map)p; StringBuilder q = new StringBuilder(); Bucket pCount = new Bucket(); parameters.forEach((k,v)->{ if (pCount.intValue() > 0) { q.append("&"); } String key = (String)k; String value; try { if (v instanceof List){ StringBuilder csv = new StringBuilder(); ((List) v).forEach(entry->{ if (csv.length() > 0){ csv.append(","); } csv.append(entry); }); value = URLEncoder.encode(csv.toString(), "utf-8"); }else { value = URLEncoder.encode(String.valueOf(v), "utf-8"); } } catch (UnsupportedEncodingException e) { value = String.valueOf(v); } if (q.indexOf(key+"=") < 0) { q.append(key).append("=").append(value); } pCount.increment(); }); return q.toString(); } private String getParametersAsJSONString(Object p) { return ((JSONAware)p).toString(); } private String getParametersAsXMLString(Object p) { return ((XMLDocument)p).toString(); } private byte[] getParameterRaw(Object p){ if (p instanceof InputStream){ return StringUtil.readBytes((InputStream)p); }else if (p instanceof Reader) { return StringUtil.readBytes(new ReaderInputStream((Reader)p)); }else if (p instanceof byte[]){ return (byte[])p; }else { throw new RuntimeException("unknown raw parameter" + p.getClass()); } } }
Call Handling redirects
swf-db/src/main/java/com/venky/swf/integration/api/Call.java
Call Handling redirects
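The difference between the two versions of Call.java above is confined to redirect handling: isBeingRedirected() and getRedirectedUrl() now trigger invoke() lazily when the call has not run yet, the same on-demand pattern already used by hasErrors() and getResponseStream(). A stripped-down sketch of that lazy-execution idiom, with the HttpURLConnection plumbing replaced by a stub (field and method bodies are simplified stand-ins, not the swf API):

public class LazyCallSketch {

    private String responseBody;   // stays null until the call has been executed
    private boolean redirected;
    private String redirectedUrl;

    // Stub for the real HTTP execution; the actual Call opens an HttpURLConnection
    // here and inspects the response code for 301/302.
    private void invoke() {
        responseBody = "";                 // mark the call as executed
        redirected = true;                 // pretend the server redirected us
        redirectedUrl = "https://example.org/moved";
    }

    // Lazily executes the call before reporting whether it was redirected.
    public boolean isBeingRedirected() {
        if (responseBody == null) {
            invoke();
        }
        return redirected;
    }

    // Lazily executes the call before returning the Location target.
    public String getRedirectedUrl() {
        if (responseBody == null) {
            invoke();
        }
        return redirectedUrl;
    }

    public static void main(String[] args) {
        LazyCallSketch call = new LazyCallSketch();
        // No explicit invoke(): the getters run the call on demand.
        System.out.println(call.isBeingRedirected() + " -> " + call.getRedirectedUrl());
    }
}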
Java
mit
658197bf931b842299ce411294d944aa916a3641
0
EvilMcJerkface/jessy,EvilMcJerkface/jessy,EvilMcJerkface/jessy,EvilMcJerkface/jessy,EvilMcJerkface/jessy,EvilMcJerkface/jessy
package fr.inria.jessy; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import net.sourceforge.fractal.utils.PerformanceProbe.TimeRecorder; import org.apache.log4j.Logger; import com.sleepycat.je.DatabaseException; import com.sleepycat.persist.model.SecondaryKey; import fr.inria.jessy.consistency.Consistency; import fr.inria.jessy.consistency.ConsistencyFactory; import fr.inria.jessy.store.DataStore; import fr.inria.jessy.store.JessyEntity; import fr.inria.jessy.store.ReadRequestKey; import fr.inria.jessy.transaction.ExecutionHistory; import fr.inria.jessy.transaction.TransactionHandler; import fr.inria.jessy.transaction.TransactionState; import fr.inria.jessy.vector.CompactVector; /** * Jessy is the abstract base class for local and distributed Jessy * implementation. * * @author Masoud Saeida Ardekani */ public abstract class Jessy { private static Logger logger = Logger.getLogger(Jessy.class); // // CONSTANTS // public enum ExecutionMode { /** * Jessy only executes transactional operations. */ TRANSACTIONAL, /** * Jessy only executes non-transactional operations. */ NON_TRANSACTIONAL, /** * execution mode is not defined yet. */ UNDEFINED, }; public static AtomicInteger lastCommittedTransactionSeqNumber = new AtomicInteger(); // // CLASS FIELDS // private static TimeRecorder ReadTime = new TimeRecorder("Jessy#readTime"); protected DataStore dataStore; Consistency consistency; // // OBJECT FIELDS // private ExecutionMode transactionalAccess = ExecutionMode.UNDEFINED; // Map<AtomicInteger, EntitySet> committedWritesets; ConcurrentMap<TransactionHandler, ExecutionHistory> handler2executionHistory; protected List<Class<? extends JessyEntity>> entityClasses; protected Jessy() throws Exception { File environmentHome = new File(System.getProperty("user.dir")); boolean readOnly = false; String storeName = "store"; dataStore = new DataStore(environmentHome, readOnly, storeName); consistency = ConsistencyFactory.getConsistency(dataStore); handler2executionHistory = new ConcurrentHashMap<TransactionHandler, ExecutionHistory>(); entityClasses = new ArrayList<Class<? extends JessyEntity>>(); lastCommittedTransactionSeqNumber.set(0); } protected DataStore getDataStore() { return dataStore; } public ExecutionHistory getExecutionHistory( TransactionHandler transactionHandler) { return handler2executionHistory.get(transactionHandler); } public void setExecutionHistory(ExecutionHistory executionHistory) { handler2executionHistory.put(executionHistory.getTransactionHandler(), executionHistory); } /** * Setup a primary index and secondary index in the data store for * performing read and write operations on entities with type E. * <p> * This method only creates one secondary index for the default secondary * key. 
In order to performs reads on different secondary keys (fields), and * not on the default secondaryKey, first, an index for each of them should * be created by calling {@link #addSecondaryIndex(Class, ArrayList)} * * @param <E> * The Type of the entity that will be store/retrieve * @param entityClass * The Class of the entity that will be store/retrieve * @throws Exception */ public <E extends JessyEntity> void addEntity(Class<E> entityClass) throws Exception { if (!entityClasses.contains(entityClass)) { dataStore.addPrimaryIndex(entityClass); dataStore.addSecondaryIndex(entityClass, String.class, "secondaryKey"); entityClasses.add(entityClass); } } /** * Setup an additional secondary index in the data store for performing read * operations on entities with type <code>E</code>, and on the field of type * <code>SK</code>, named * <code>keyName<code> and annotated with {@link SecondaryKey} * * @param <E> * The Type of the entity that will be retrieve * @param <SK> * The Type of the additional secondary key. * @param entityClass * The Class of the entity that will be retrieve * @param keyClass * The class of the additional secondary key * @param keyName * The name of the additional secondary Key * @throws Exception */ public <E extends JessyEntity, SK> void addSecondaryIndex( Class<E> entityClass, Class<SK> keyClass, String keyName) throws Exception { dataStore.addSecondaryIndex(entityClass, keyClass, keyName); } /** * This method should be called for reading an entity with a query on a * default secondary key. * <p> * Executes a read operation on Jessy. It first checks to see if the * transaction has written a value for the same entity or not. If it has * written a new value previously, it returns that value, otherwise, it * calls the {@link Jessy#performRead(Class, String, Object, List)} method. * <p> * This read is performed on {@link JessyEntity#getKey()} * * @param <E> * Type of the entity to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyValue * The value of the secondary key * @return The entity with the secondary key value equals keyValue */ public <E extends JessyEntity> E read( TransactionHandler transactionHandler, Class<E> entityClass, String keyValue) throws Exception { ReadTime.start(); ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { ReadTime.stop(); throw new NullPointerException("Transaction has not been started"); } E entity; entity = executionHistory.getWriteEntity(keyValue); // we first check it this entity has been updated in this transaction // before! if (entity == null) { // if the entity has not been updated, we check if it has been read // in the same transaction before. entity = executionHistory.getReadEntity(keyValue); if (entity == null) entity = performRead(entityClass, "secondaryKey", keyValue, executionHistory.getReadSet().getCompactVector()); } if (entity != null) { executionHistory.addReadEntity(entity); ReadTime.stop(); return entity; } else { ReadTime.stop(); return null; } } /** * * Executes a read operation ONLY on Jessy. It calls the * {@link Jessy#performRead(Class, String, Object, List)} method to read the * data. * <p> * This read is performed on all keys provided {@code keys} This method * never checks local cache!!!! * <p> * If the cardinality is not known in advance, always use this method since * it returns all consistent entities corresponding to the keys. * * @param <E> * The Type of the entity to read the value from. 
* @param <SK> * The Type of the secondary key to read the value from. * @param entityClass * The Class of the entity to read the value from. * @param keyName * The name of the secondary key. * @param keyValue * The value of the secondary key * @return The entity with the keyName field value equals keyValue */ public <E extends JessyEntity, SK> Collection<E> read( TransactionHandler transactionHandler, Class<E> entityClass, List<ReadRequestKey<?>> keys) throws Exception { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } Collection<E> entities = performRead(entityClass, keys, executionHistory.getReadSet().getCompactVector()); if (entities != null) { executionHistory.addReadEntity(entities); return entities; } else { return null; } } /** * Performs a local or remote read operation depending on the specific * implementation of Jessy. * * @param <E> * Type of the entity to read the value from. * @param <SK> * Type of the secondary key to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyName * The name of the secondary key * @param keyValue * The value of the secondary key * @param readList * List of vectors of already executed read operations. * @return An entity with the secondary key equals keyName and its value * equals keyValue */ protected abstract <E extends JessyEntity, SK> E performRead( Class<E> entityClass, String keyName, SK keyValue, CompactVector<String> readSet) throws InterruptedException, ExecutionException; /** * Performs a local or remote read operation depending on the specific * implementation of Jessy on all provided keys. * * @param <E> * Type of the entity to read the value from. * @param <SK> * Type of the secondary key to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyName * The name of the secondary key * @param keyValue * The value of the secondary key * @param readList * List of vectors of already executed read operations. * @return An entity with the secondary key equals keyName and its value * equals keyValue */ protected abstract <E extends JessyEntity> Collection<E> performRead( Class<E> entityClass, List<ReadRequestKey<?>> keys, CompactVector<String> readSet) throws InterruptedException, ExecutionException; /** * Stores the entity locally. The locally stored entities will be stored in * the database upon calling {@link Jessy#commitTransaction()}. * * @param <E> * Type of the entity to read the value from. * @param entity */ public <E extends JessyEntity> void write( TransactionHandler transactionHandler, E entity) throws NullPointerException { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } else { // First checks if we have already read an entity with the same key! // TODO make this conditional according to user definition! (if // disabled, performance gain) JessyEntity tmp = executionHistory.getReadEntity(entity.getKey()); if (tmp == null) { /* * the operation is a blind write! First issue a read operation. */ try { tmp = read(transactionHandler, entity.getClass(), entity.getKey()); } catch (Exception e) { /* * Nothing to do. if this is a first write operation, then * it comes here! 
*/ } } entity.setLocalVector(tmp.getLocalVector()); executionHistory.addWriteEntity(entity); } } /** * Add the entity into the createSet. * <p> * TODO It should be checked whether this entity has been put or not. If the * above rule is ensured by the client, then create is much faster. (only * one write) */ public <E extends JessyEntity> void create( TransactionHandler transactionHandler, E entity) throws NullPointerException { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } else { executionHistory.addCreateEntity(entity); } } public <E extends JessyEntity> void remove( TransactionHandler transactionHandler, E entity) throws NullPointerException { entity.removoe(); write(transactionHandler, entity); } public TransactionHandler startTransaction() throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.TRANSACTIONAL; if (transactionalAccess == ExecutionMode.TRANSACTIONAL) { TransactionHandler transactionHandler = new TransactionHandler(); ExecutionHistory executionHistory = new ExecutionHistory( transactionHandler); executionHistory.changeState(TransactionState.EXECUTING); handler2executionHistory.put(transactionHandler, executionHistory); return transactionHandler; } throw new Exception( "Jessy has been accessed in non-transactional way. It cannot be accesesed transactionally"); } /** * Commit the open transaction, and garbage collect it. * * @return */ public abstract ExecutionHistory commitTransaction( TransactionHandler transactionHandler); /** * Put the transaction in the aborted list, and does nothing else. * * @param transactionHandler */ public ExecutionHistory abortTransaction( TransactionHandler transactionHandler) { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); executionHistory.changeState(TransactionState.ABORTED_BY_CLIENT); return executionHistory; } /** * Executes a non-transactional read on local datastore. This read is * performed on {@link JessyEntity#getKey()} * * @param <E> * The Type of the entity to read the value from. * @param entityClass * The Class of the entity to read the value from. * @param keyValue * @return An entity with the secondary key value equals keyValue */ public <E extends JessyEntity> E read(Class<E> entityClass, String keyValue) throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.NON_TRANSACTIONAL; if (transactionalAccess == ExecutionMode.NON_TRANSACTIONAL) { return performRead(entityClass, "secondaryKey", keyValue, null); } throw new Exception( "Jessy has been accessed in transactional way. It cannot be accesesed non-transactionally"); } /** * Executes a non-transactional write. Write the entity into the local * datastore This write is performed on {@link JessyEntity#getKey()} * * @param <E> * Type of the entity to read the value from. * @param entity * the object to be written into the local datastore. */ public <E extends JessyEntity> void write(E entity) throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.NON_TRANSACTIONAL; if (transactionalAccess == ExecutionMode.NON_TRANSACTIONAL) { performNonTransactionalWrite(entity); return; } throw new Exception( "Jessy has been accessed in transactional way. 
It cannot be accesesed non-transactionally"); } protected abstract <E extends JessyEntity> void performNonTransactionalWrite( E entity) throws InterruptedException, ExecutionException; /** * Apply changes of a writeSet and createSet of a committed transaction to * the datastore. * * @param transactionHandler * handler of a committed transaction. */ public void applyModifiedEntities(ExecutionHistory executionHistory) { // ExecutionHistory executionHistory = handler2executionHistory // .get(transactionHandler); Iterator<? extends JessyEntity> itr; if (executionHistory.getWriteSet().size() > 0) { itr = executionHistory.getWriteSet().getEntities().iterator(); while (itr.hasNext()) { JessyEntity tmp = itr.next(); // Send the entity to the datastore to be saved dataStore.put(tmp); } } if (executionHistory.getCreateSet().size() > 0) { itr = executionHistory.getCreateSet().getEntities().iterator(); while (itr.hasNext()) { JessyEntity tmp = itr.next(); // Send the entity to the datastore to be saved dataStore.put(tmp); } } } public void garbageCollectTransaction(TransactionHandler transactionHandler) { handler2executionHistory.remove(transactionHandler); } protected Set<Object> activeClients = new HashSet<Object>(); public synchronized void registerClient(Object object) { if (!activeClients.contains(object)) activeClients.add(object); } public void close(Object object) throws DatabaseException { dataStore.close(); logger.info("Jessy DataStore is closed. The data should be permanent by now."); } // TODO public void open() { } public Consistency getConsistency() { return this.consistency; } }
src/fr/inria/jessy/Jessy.java
package fr.inria.jessy; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import org.apache.log4j.Logger; import net.sourceforge.fractal.utils.PerformanceProbe.TimeRecorder; import com.sleepycat.je.DatabaseException; import com.sleepycat.persist.model.SecondaryKey; import fr.inria.jessy.consistency.Consistency; import fr.inria.jessy.consistency.ConsistencyFactory; import fr.inria.jessy.store.DataStore; import fr.inria.jessy.store.EntitySet; import fr.inria.jessy.store.JessyEntity; import fr.inria.jessy.store.ReadRequestKey; import fr.inria.jessy.transaction.ExecutionHistory; import fr.inria.jessy.transaction.Transaction; import fr.inria.jessy.transaction.TransactionHandler; import fr.inria.jessy.transaction.TransactionState; import fr.inria.jessy.vector.CompactVector; /** * Jessy is the abstract base class for local and distributed Jessy * implementation. * * @author Masoud Saeida Ardekani */ public abstract class Jessy { private static Logger logger = Logger.getLogger(Jessy.class); // // CONSTANTS // public enum ExecutionMode { /** * Jessy only executes transactional operations. */ TRANSACTIONAL, /** * Jessy only executes non-transactional operations. */ NON_TRANSACTIONAL, /** * execution mode is not defined yet. */ UNDEFINED, }; public static AtomicInteger lastCommittedTransactionSeqNumber = new AtomicInteger(); // // CLASS FIELDS // private static TimeRecorder ReadTime = new TimeRecorder("Jessy#readTime"); protected DataStore dataStore; Consistency consistency; // // OBJECT FIELDS // private ExecutionMode transactionalAccess = ExecutionMode.UNDEFINED; // Map<AtomicInteger, EntitySet> committedWritesets; ConcurrentMap<TransactionHandler, ExecutionHistory> handler2executionHistory; protected List<Class<? extends JessyEntity>> entityClasses; protected Jessy() throws Exception { File environmentHome = new File(System.getProperty("user.dir")); boolean readOnly = false; String storeName = "store"; dataStore = new DataStore(environmentHome, readOnly, storeName); consistency = ConsistencyFactory.getConsistency(dataStore); handler2executionHistory = new ConcurrentHashMap<TransactionHandler, ExecutionHistory>(); entityClasses = new ArrayList<Class<? extends JessyEntity>>(); lastCommittedTransactionSeqNumber.set(0); } protected DataStore getDataStore() { return dataStore; } public ExecutionHistory getExecutionHistory( TransactionHandler transactionHandler) { return handler2executionHistory.get(transactionHandler); } public void setExecutionHistory(ExecutionHistory executionHistory) { handler2executionHistory.put(executionHistory.getTransactionHandler(), executionHistory); } /** * Setup a primary index and secondary index in the data store for * performing read and write operations on entities with type E. * <p> * This method only creates one secondary index for the default secondary * key. 
In order to performs reads on different secondary keys (fields), and * not on the default secondaryKey, first, an index for each of them should * be created by calling {@link #addSecondaryIndex(Class, ArrayList)} * * @param <E> * The Type of the entity that will be store/retrieve * @param entityClass * The Class of the entity that will be store/retrieve * @throws Exception */ public <E extends JessyEntity> void addEntity(Class<E> entityClass) throws Exception { if (!entityClasses.contains(entityClass)) { dataStore.addPrimaryIndex(entityClass); dataStore.addSecondaryIndex(entityClass, String.class, "secondaryKey"); entityClasses.add(entityClass); } } /** * Setup an additional secondary index in the data store for performing read * operations on entities with type <code>E</code>, and on the field of type * <code>SK</code>, named * <code>keyName<code> and annotated with {@link SecondaryKey} * * @param <E> * The Type of the entity that will be retrieve * @param <SK> * The Type of the additional secondary key. * @param entityClass * The Class of the entity that will be retrieve * @param keyClass * The class of the additional secondary key * @param keyName * The name of the additional secondary Key * @throws Exception */ public <E extends JessyEntity, SK> void addSecondaryIndex( Class<E> entityClass, Class<SK> keyClass, String keyName) throws Exception { dataStore.addSecondaryIndex(entityClass, keyClass, keyName); } /** * This method should be called for reading an entity with a query on a * default secondary key. * <p> * Executes a read operation on Jessy. It first checks to see if the * transaction has written a value for the same entity or not. If it has * written a new value previously, it returns that value, otherwise, it * calls the {@link Jessy#performRead(Class, String, Object, List)} method. * <p> * This read is performed on {@link JessyEntity#getKey()} * * @param <E> * Type of the entity to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyValue * The value of the secondary key * @return The entity with the secondary key value equals keyValue */ public <E extends JessyEntity> E read( TransactionHandler transactionHandler, Class<E> entityClass, String keyValue) throws Exception { ReadTime.start(); ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { ReadTime.stop(); throw new NullPointerException("Transaction has not been started"); } E entity; entity = executionHistory.getWriteEntity(keyValue); // we first check it this entity has been updated in this transaction // before! if (entity == null) { // if the entity has not been updated, we check if it has been read // in the same transaction before. entity = executionHistory.getReadEntity(keyValue); if (entity == null) entity = performRead(entityClass, "secondaryKey", keyValue, executionHistory.getReadSet().getCompactVector()); } if (entity != null) { executionHistory.addReadEntity(entity); ReadTime.stop(); return entity; } else { ReadTime.stop(); return null; } } /** * * Executes a read operation ONLY on Jessy. It calls the * {@link Jessy#performRead(Class, String, Object, List)} method to read the * data. * <p> * This read is performed on all keys provided {@code keys} This method * never checks local cache!!!! * <p> * If the cardinality is not known in advance, always use this method since * it returns all consistent entities corresponding to the keys. * * @param <E> * The Type of the entity to read the value from. 
* @param <SK> * The Type of the secondary key to read the value from. * @param entityClass * The Class of the entity to read the value from. * @param keyName * The name of the secondary key. * @param keyValue * The value of the secondary key * @return The entity with the keyName field value equals keyValue */ public <E extends JessyEntity, SK> Collection<E> read( TransactionHandler transactionHandler, Class<E> entityClass, List<ReadRequestKey<?>> keys) throws Exception { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } Collection<E> entities = performRead(entityClass, keys, executionHistory.getReadSet().getCompactVector()); if (entities != null) { executionHistory.addReadEntity(entities); return entities; } else { return null; } } /** * Performs a local or remote read operation depending on the specific * implementation of Jessy. * * @param <E> * Type of the entity to read the value from. * @param <SK> * Type of the secondary key to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyName * The name of the secondary key * @param keyValue * The value of the secondary key * @param readList * List of vectors of already executed read operations. * @return An entity with the secondary key equals keyName and its value * equals keyValue */ protected abstract <E extends JessyEntity, SK> E performRead( Class<E> entityClass, String keyName, SK keyValue, CompactVector<String> readSet) throws InterruptedException, ExecutionException; /** * Performs a local or remote read operation depending on the specific * implementation of Jessy on all provided keys. * * @param <E> * Type of the entity to read the value from. * @param <SK> * Type of the secondary key to read the value from. * @param entityClass * Class of the entity to read the value from. * @param keyName * The name of the secondary key * @param keyValue * The value of the secondary key * @param readList * List of vectors of already executed read operations. * @return An entity with the secondary key equals keyName and its value * equals keyValue */ protected abstract <E extends JessyEntity> Collection<E> performRead( Class<E> entityClass, List<ReadRequestKey<?>> keys, CompactVector<String> readSet) throws InterruptedException, ExecutionException; /** * Stores the entity locally. The locally stored entities will be stored in * the database upon calling {@link Jessy#commitTransaction()}. * * @param <E> * Type of the entity to read the value from. * @param entity */ public <E extends JessyEntity> void write( TransactionHandler transactionHandler, E entity) throws NullPointerException { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } else { // First checks if we have already read an entity with the same key! // TODO make this conditional according to user definition! (if // disabled, performance gain) JessyEntity tmp = executionHistory.getReadEntity(entity.getKey()); if (tmp == null) { // the operation is a blind write! First issue a read operation. try { tmp=read(transactionHandler, entity.getClass(), entity.getKey()); // tmp=read(entity.getClass(), entity.getKey()); } catch (Exception e) { /* * Nothing to do. if this is a first write operation, then * it comes here! 
*/ } } entity.setLocalVector(tmp.getLocalVector()); executionHistory.addWriteEntity(entity); } } /** * Add the entity into the createSet. * <p> * TODO It should be checked whether this entity has been put or not. If the * above rule is ensured by the client, then create is much faster. (only * one write) */ public <E extends JessyEntity> void create( TransactionHandler transactionHandler, E entity) throws NullPointerException { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); if (executionHistory == null) { throw new NullPointerException("Transaction has not been started"); } else { executionHistory.addCreateEntity(entity); } } public <E extends JessyEntity> void remove( TransactionHandler transactionHandler, E entity) throws NullPointerException { entity.removoe(); write(transactionHandler, entity); } public TransactionHandler startTransaction() throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.TRANSACTIONAL; if (transactionalAccess == ExecutionMode.TRANSACTIONAL) { TransactionHandler transactionHandler = new TransactionHandler(); ExecutionHistory executionHistory = new ExecutionHistory( transactionHandler); executionHistory.changeState(TransactionState.EXECUTING); handler2executionHistory.put(transactionHandler, executionHistory); return transactionHandler; } throw new Exception( "Jessy has been accessed in non-transactional way. It cannot be accesesed transactionally"); } /** * Commit the open transaction, and garbage collect it. * * @return */ public abstract ExecutionHistory commitTransaction( TransactionHandler transactionHandler); /** * Put the transaction in the aborted list, and does nothing else. * * @param transactionHandler */ public ExecutionHistory abortTransaction( TransactionHandler transactionHandler) { ExecutionHistory executionHistory = handler2executionHistory .get(transactionHandler); executionHistory.changeState(TransactionState.ABORTED_BY_CLIENT); return executionHistory; } /** * Executes a non-transactional read on local datastore. This read is * performed on {@link JessyEntity#getKey()} * * @param <E> * The Type of the entity to read the value from. * @param entityClass * The Class of the entity to read the value from. * @param keyValue * @return An entity with the secondary key value equals keyValue */ public <E extends JessyEntity> E read(Class<E> entityClass, String keyValue) throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.NON_TRANSACTIONAL; if (transactionalAccess == ExecutionMode.NON_TRANSACTIONAL) { return performRead(entityClass, "secondaryKey", keyValue, null); } throw new Exception( "Jessy has been accessed in transactional way. It cannot be accesesed non-transactionally"); } /** * Executes a non-transactional write. Write the entity into the local * datastore This write is performed on {@link JessyEntity#getKey()} * * @param <E> * Type of the entity to read the value from. * @param entity * the object to be written into the local datastore. */ public <E extends JessyEntity> void write(E entity) throws Exception { if (transactionalAccess == ExecutionMode.UNDEFINED) transactionalAccess = ExecutionMode.NON_TRANSACTIONAL; if (transactionalAccess == ExecutionMode.NON_TRANSACTIONAL) { performNonTransactionalWrite(entity); return; } throw new Exception( "Jessy has been accessed in transactional way. 
It cannot be accessed non-transactionally"); } protected abstract <E extends JessyEntity> void performNonTransactionalWrite( E entity) throws InterruptedException, ExecutionException; /** * Applies the changes of the writeSet and createSet of a committed transaction to * the datastore. * * @param executionHistory * the execution history of a committed transaction. */ public void applyModifiedEntities(ExecutionHistory executionHistory) { // ExecutionHistory executionHistory = handler2executionHistory // .get(transactionHandler); Iterator<? extends JessyEntity> itr; if (executionHistory.getWriteSet().size() > 0) { itr = executionHistory.getWriteSet().getEntities().iterator(); while (itr.hasNext()) { JessyEntity tmp = itr.next(); // Send the entity to the datastore to be saved dataStore.put(tmp); } } if (executionHistory.getCreateSet().size() > 0) { itr = executionHistory.getCreateSet().getEntities().iterator(); while (itr.hasNext()) { JessyEntity tmp = itr.next(); // Send the entity to the datastore to be saved dataStore.put(tmp); } } } public void garbageCollectTransaction(TransactionHandler transactionHandler) { handler2executionHistory.remove(transactionHandler); } protected Set<Object> activeClients = new HashSet<Object>(); public synchronized void registerClient(Object object) { if (!activeClients.contains(object)) activeClients.add(object); } public void close(Object object) throws DatabaseException { dataStore.close(); logger.info("Jessy DataStore is closed. The data should be permanent by now."); } // TODO public void open() { } public Consistency getConsistency() { return this.consistency; } }
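For context, a minimal usage sketch of the transactional API defined above follows. It is illustrative only: SampleEntity is a hypothetical JessyEntity subclass, the single-key read overload is the one referenced inside write(...), and error handling is omitted.

// Hedged usage sketch of the Jessy transactional API (not part of the original file).
// SampleEntity is a hypothetical JessyEntity subclass used purely for illustration.
static void runOneTransaction(Jessy jessy) throws Exception {
    TransactionHandler th = jessy.startTransaction();                  // enters TRANSACTIONAL mode
    SampleEntity e = jessy.read(th, SampleEntity.class, "sample-key"); // recorded in the read set
    jessy.write(th, e);                                                // buffered in the write set until commit
    ExecutionHistory history = jessy.commitTransaction(th);            // certification/commit is implementation-specific
    jessy.garbageCollectTransaction(th);                               // drops the per-transaction bookkeeping
}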
git-svn-id: svn+ssh://scm.gforge.inria.fr/svn/regal/trunk/src/jessy@1135 334080fa-30e0-4884-a663-39494f8d70c4
src/fr/inria/jessy/Jessy.java
Java
epl-1.0
4c1758d3b8003d4b3589fe80af16f590c2e6abe8
0
alb-i986/junit,mekwin87/junit4,songfj/junit,jordancheah/junit,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,UnimibSoftEngCourse1516/lab2-es3-s.renzo,laercioferracini/junit,rws-github/junit,alb-i986/junit,openhardnudd/junit,junit-team/junit,flomotlik/junit,edwardmlyte/junit,slezier/junit,baev/junit,freezhan/junit,MichaelJY91/junit,stefanbirkner/junit,kobe73er/MyUnit,y-kt/junit,Thothius/junit,MichaelJY91/junit,kobe73er/dUnit,rws-github/junit,moinuddin14/junit,elijah513/junit,nathanchen/JUnitCodeReading,UnimibSoftEngCourse1516/lab2-es3-e.nani1,dvberkel/junit,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,MichaelJY91/junit,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,janocat/junit,Clairebi/JUnit-Clone,hansjoachim/junit,stefanbirkner/junit,Siddartha07/junit,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,dvberkel/junit,marcphilipp/junit,rwarren14/junit,0359xiaodong/junit,elijah513/junit,adko-pl/junit,jhfjhfj1/junit,AxelMonroyX/junit4,onesfreedom/junit,junit-team/junit4,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,feisuo/junit,alohageck0/junit,kobe73er/MyUnit,julien-sobczak/junit,alohageck0/junit,y-kt/junit,UnimibSoftEngCourse1516/lab2-es3-a.mosini,panchenko/junit,alb-i986/junit,witcxc/junit,jordancheah/junit,larrychen1990/junit,junit-team/junit,witcxc/junit,sposam/junit,edwardmlyte/junit,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,1234-/junit,junit-team/junit4,powazny/junit4,UnimibSoftEngCourse1516/lab2-es3-f.spinardi,kobe73er/MyUnit,ashleyfrieze/junit,smayoorans/junit,paulduffin/junit,songfj/junit,kobe73er/dUnit,UrsMetz/junit,UnimibSoftEngCourse1516/lab2-es3-a.tundo,yusuke/junit,eamonnmcmanus/junit,UnimibSoftEngCourse1516/lab2-es3-a.tundo,slezier/junit,paulduffin/junit,stefanbirkner/junit,JoaquinSiabra/junit,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,hhariri/junit,remus32/junit,nathanchen/JUnitCodeReading,feisuo/junit,1234-/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,avandeursen/junit,schauder/junit,UnimibSoftEngCourse1516/lab2-es3-a.mosini,julien-sobczak/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,quentin9696/junit,kobe73er/dUnit,hhariri/junit,elijah513/junit,UrsMetz/junit,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,chrisvest/junit,UnimibSoftEngCourse1516/lab2-es3-l.salvestrini,yusuke/junit,slezier/junit,larrychen1990/junit,witcxc/junit,mnk/junit,mnk/junit,UnimibSoftEngCourse1516/lab2-es3-a.mosini,julien-sobczak/junit,UrsMetz/junit,hhariri/junit,UnimibSoftEngCourse1516/lab2-es3-s.renzo,UnimibSoftEngCourse1516/lab2-es3-s.renzo,freezhan/junit,avandeursen/junit,rws-github/junit,UnimibSoftEngCourse1516/lab2-es3-l.salvestrini,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,laercioferracini/junit,UnimibSoftEngCourse1516/lab2-es3-a.tundo,flomotlik/junit,jhfjhfj1/junit,rwarren14/junit,jordancheah/junit,sposam/junit,rwarren14/junit,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,baev/junit,easyMan-zzy/junit,powazny/junit4,AxelMonroyX/junit4,sposam/junit,hansjoachim/junit,0359xiaodong/junit,junit-team/junit,powazny/junit4,cherryleer/junit,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,schauder/junit,GeeChao/junit,junit-team/junit4,remus32/junit,kcooney/junit,jhfjhfj1/junit,MingxuanChen/junit,quentin9696/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,easyMan-zzy/junit,freezhan/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,UnimibSoftEngCourse1516/lab2-es3-e.nani1,ashleyfrieze/junit,rws-github/junit,Thothius/junit,chrisvest/junit,edwardmlyte/junit,remus32/junit,feisuo/junit,cherryleer/junit,vorburger/junit,smayoorans/junit,easyMan-zzy/junit,UnimibSoftEngCourse1516/la
b2-es3-l.salvestrini,chrisvest/junit,larrychen1990/junit,eamonnmcmanus/junit,moinuddin14/junit,cherryleer/junit,janocat/junit,JoaquinSiabra/junit,rws-github/junit,UnimibSoftEngCourse1516/lab2-es3-m.polonioli,Clairebi/JUnit-Clone,openhardnudd/junit,VikingDen/junit,nathanchen/JUnitCodeReading,GeeChao/junit,UnimibSoftEngCourse1516/lab2-es3-m.vella6,paulduffin/junit,avandeursen/junit,schauder/junit,mekwin87/junit4,Siddartha07/junit,mekwin87/junit4,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,hansjoachim/junit,baev/junit,MingxuanChen/junit,0359xiaodong/junit,onesfreedom/junit,mekwin87/junit4,adko-pl/junit,y-kt/junit,openhardnudd/junit,Siddartha07/junit,UnimibSoftEngCourse1516/lab2-es3-i.pigazzini,quentin9696/junit,onesfreedom/junit,vorburger/junit,MingxuanChen/junit,kcooney/junit,adko-pl/junit,ashleyfrieze/junit,vorburger/junit,kcooney/junit,UnimibSoftEngCourse1516/lab2-es3-o.sertori,Thothius/junit,marcphilipp/junit,AxelMonroyX/junit4,marcphilipp/junit,smayoorans/junit,UrsMetz/junit,mnk/junit,panchenko/junit,moinuddin14/junit,flomotlik/junit,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,janocat/junit,VikingDen/junit,1234-/junit,nathanchen/JUnitCodeReading,vorburger/junit,UnimibSoftEngCourse1516/lab2-es3-e.nani1,GeeChao/junit,panchenko/junit,eamonnmcmanus/junit,laercioferracini/junit,alohageck0/junit,UnimibSoftEngCourse1516/lab2-es3-m.scarpone,yusuke/junit,VikingDen/junit,dvberkel/junit,JoaquinSiabra/junit,UnimibSoftEngCourse1516/lab2-es3-f.cirelli1,UnimibSoftEngCourse1516/lab2-es3-s.ravetta,Clairebi/JUnit-Clone,UnimibSoftEngCourse1516/lab2-es3-f.giannini3,songfj/junit
package org.junit; /** * Thrown when an {@link org.junit.Assert#assertEquals(Object, Object) assertEquals(String, String)} fails. Create and throw * a <code>ComparisonFailure</code> manually if you want to show users the difference between two complex * strings. * * Inspired by a patch from Alex Chaffee ([email protected]) * * @since 4.0 */ public class ComparisonFailure extends AssertionError { /** * The maximum length for fExpected and fActual. If it is exceeded, the strings should be shortened. * * @see ComparisonCompactor */ private static final int MAX_CONTEXT_LENGTH = 20; private static final long serialVersionUID = 1L; private String fExpected; private String fActual; /** * Constructs a comparison failure. * * @param message the identifying message or null * @param expected the expected string value * @param actual the actual string value */ public ComparisonFailure(String message, String expected, String actual) { super(message); fExpected = expected; fActual = actual; } /** * Returns "..." in place of common prefix and "..." in * place of common suffix between expected and actual. * * @see Throwable#getMessage() */ @Override public String getMessage() { return new ComparisonCompactor(MAX_CONTEXT_LENGTH, fExpected, fActual).compact(super.getMessage()); } /** * Returns the actual string value * * @return the actual string value */ public String getActual() { return fActual; } /** * Returns the expected string value * * @return the expected string value */ public String getExpected() { return fExpected; } private static class ComparisonCompactor { private static final String ELLIPSIS = "..."; private static final String DELTA_END = "]"; private static final String DELTA_START = "["; /** * The maximum length for <code>expected</code> and <code>actual</code>. When <code>contextLength</code> * is exceeded, the Strings are shortened */ private int fContextLength; private String fExpected; private String fActual; /** * The length of the shared prefix / suffix of the expected and actual strings. * Equals to zero if the strings do not share a common prefix/suffix. */ private int fPrefix; private int fSuffix; /** * @param contextLength the maximum length for <code>expected</code> and <code>actual</code>. 
When contextLength * is exceeded, the Strings are shortened * @param expected the expected string value * @param actual the actual string value */ public ComparisonCompactor(int contextLength, String expected, String actual) { fContextLength = contextLength; fExpected = expected; fActual = actual; } private String compact(String message) { if (fExpected == null || fActual == null || areStringsEqual()) { return Assert.format(message, fExpected, fActual); } findCommonPrefix(); findCommonSuffix(); String expected = compactString(fExpected); String actual = compactString(fActual); return Assert.format(message, expected, actual); } private String compactString(String source) { String result = DELTA_START + source.substring(fPrefix, source.length() - fSuffix) + DELTA_END; if (fPrefix > 0) { result = computeCommonPrefix() + result; } if (fSuffix > 0) { result = result + computeCommonSuffix(); } return result; } private void findCommonPrefix() { fPrefix = 0; int end = Math.min(fExpected.length(), fActual.length()); for (; fPrefix < end; fPrefix++) { if (fExpected.charAt(fPrefix) != fActual.charAt(fPrefix)) { break; } } } private void findCommonSuffix() { int expectedSuffix = fExpected.length() - 1; int actualSuffix = fActual.length() - 1; for (; actualSuffix >= fPrefix && expectedSuffix >= fPrefix; actualSuffix--, expectedSuffix--) { if (fExpected.charAt(expectedSuffix) != fActual.charAt(actualSuffix)) { break; } } fSuffix = fExpected.length() - expectedSuffix - 1; } private String computeCommonPrefix() { return (fPrefix > fContextLength ? ELLIPSIS : "") + fExpected.substring(Math.max(0, fPrefix - fContextLength), fPrefix); } private String computeCommonSuffix() { int end = Math.min(fExpected.length() - fSuffix + fContextLength, fExpected.length()); return fExpected.substring(fExpected.length() - fSuffix, end) + (fExpected.length() - fSuffix < fExpected.length() - fContextLength ? ELLIPSIS : ""); } private boolean areStringsEqual() { return fExpected.equals(fActual); } } }
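The compactor above replaces the shared prefix and suffix of the two strings with "..." when they exceed the context length and brackets the differing middle. A small, self-contained illustration follows; it is not part of the commit, and the exact message wording comes from Assert.format, so treat the printed string as approximate.

// Illustrative demo of the compaction behaviour of ComparisonFailure (not part of the commit).
import org.junit.ComparisonFailure;

public class ComparisonFailureDemo {
    public static void main(String[] args) {
        ComparisonFailure failure = new ComparisonFailure(
                "colors differ",
                "the quick brown fox",   // expected
                "the quick green fox");  // actual
        System.out.println(failure.getMessage());
        // prints roughly: colors differ expected:<the quick [brow]n fox> but was:<the quick [gree]n fox>
    }
}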
src/main/java/org/junit/ComparisonFailure.java
package org.junit; /** * Thrown when an {@link org.junit.Assert#assertEquals(Object, Object) assertEquals(String, String)} fails. Create and throw * a <code>ComparisonFailure</code> manually if you want to show users the difference between two complex * strings. * * Inspired by a patch from Alex Chaffee ([email protected]) * * @since 4.0 */ public class ComparisonFailure extends AssertionError { /** * The maximum length for fExpected and fActual. If it is exceeded, the strings should be shortened. * * @see ComparisonCompactor */ private static final int MAX_CONTEXT_LENGTH = 20; private static final long serialVersionUID = 1L; private String fExpected; private String fActual; /** * Constructs a comparison failure. * * @param message the identifying message or null * @param expected the expected string value * @param actual the actual string value */ public ComparisonFailure(String message, String expected, String actual) { super(message); fExpected = expected; fActual = actual; } /** * Returns "..." in place of common prefix and "..." in * place of common suffix between expected and actual. * * @see Throwable#getMessage() */ @Override public String getMessage() { return new ComparisonCompactor(MAX_CONTEXT_LENGTH, fExpected, fActual).compact(super.getMessage()); } /** * Returns the actual string value * * @return the actual string value */ public String getActual() { return fActual; } /** * Returns the expected string value * * @return the expected string value */ public String getExpected() { return fExpected; } private static class ComparisonCompactor { private static final String ELLIPSIS = "..."; private static final String DELTA_END = "]"; private static final String DELTA_START = "["; /** * The maximum length for <code>expected</code> and <code>actual</code>. When <code>contextLength</code> * is exceeded, the Strings are shortened */ private int fContextLength; private String fExpected; private String fActual; private int fPrefix; private int fSuffix; /** * @param contextLength the maximum length for <code>expected</code> and <code>actual</code>. 
When contextLength * is exceeded, the Strings are shortened * @param expected the expected string value * @param actual the actual string value */ public ComparisonCompactor(int contextLength, String expected, String actual) { fContextLength = contextLength; fExpected = expected; fActual = actual; } private String compact(String message) { if (fExpected == null || fActual == null || areStringsEqual()) { return Assert.format(message, fExpected, fActual); } findCommonPrefix(); findCommonSuffix(); String expected = compactString(fExpected); String actual = compactString(fActual); return Assert.format(message, expected, actual); } private String compactString(String source) { String result = DELTA_START + source.substring(fPrefix, source.length() - fSuffix + 1) + DELTA_END; if (fPrefix > 0) { result = computeCommonPrefix() + result; } if (fSuffix > 0) { result = result + computeCommonSuffix(); } return result; } private void findCommonPrefix() { fPrefix = 0; int end = Math.min(fExpected.length(), fActual.length()); for (; fPrefix < end; fPrefix++) { if (fExpected.charAt(fPrefix) != fActual.charAt(fPrefix)) { break; } } } private void findCommonSuffix() { int expectedSuffix = fExpected.length() - 1; int actualSuffix = fActual.length() - 1; for (; actualSuffix >= fPrefix && expectedSuffix >= fPrefix; actualSuffix--, expectedSuffix--) { if (fExpected.charAt(expectedSuffix) != fActual.charAt(actualSuffix)) { break; } } fSuffix = fExpected.length() - expectedSuffix; } private String computeCommonPrefix() { return (fPrefix > fContextLength ? ELLIPSIS : "") + fExpected.substring(Math.max(0, fPrefix - fContextLength), fPrefix); } private String computeCommonSuffix() { int end = Math.min(fExpected.length() - fSuffix + 1 + fContextLength, fExpected.length()); return fExpected.substring(fExpected.length() - fSuffix + 1, end) + (fExpected.length() - fSuffix + 1 < fExpected.length() - fContextLength ? ELLIPSIS : ""); } private boolean areStringsEqual() { return fExpected.equals(fActual); } } }
Refactoring ensuring fSuffix is equal to the length of the suffix. While fPrefix was equal to the length of the prefix, in the old code fSuffix was equal to the length of the suffix + 1. Made the two consistent, resulting in a simplification of the suffix computation (removed "+ 1" four times, added "- 1" once). The inconsistency was found through branch coverage analysis, which suggested that the condition "fSuffix > 0" could never be false.
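A short worked example makes the invariant change concrete; the strings below are chosen purely for illustration.

// Worked example of the fSuffix invariant change (illustrative values only).
// expected = "abcXdef", actual = "abcYdef" -> shared prefix "abc", shared suffix "def".
// The backwards scan in findCommonSuffix stops with expectedSuffix == 3, so:
//   old code: fSuffix = expected.length() - expectedSuffix     = 7 - 3     = 4  (suffix length + 1)
//   new code: fSuffix = expected.length() - expectedSuffix - 1 = 7 - 3 - 1 = 3  (suffix length)
// With fSuffix equal to the actual suffix length, compactString can call
// source.substring(fPrefix, source.length() - fSuffix) without the former "+ 1" corrections.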
src/main/java/org/junit/ComparisonFailure.java
Refactoring ensuring fSuffix is equal to the length of the suffix.
Java
agpl-3.0
e500c2e1d6eb5e1c655007e8702691ce4ba61d6c
0
smith750/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,bhutchinson/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,ua-eas/kfs,smith750/kfs,ua-eas/kfs,bhutchinson/kfs,kkronenb/kfs,kkronenb/kfs,quikkian-ua-devops/will-financials,kuali/kfs,kkronenb/kfs,kuali/kfs,kkronenb/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,quikkian-ua-devops/kfs,ua-eas/kfs,smith750/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,kuali/kfs,ua-eas/kfs,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,kuali/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,smith750/kfs,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,kuali/kfs,bhutchinson/kfs
/* * Copyright 2008 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.sys.service.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.kuali.kfs.sys.businessobject.TaxRegion; import org.kuali.kfs.sys.businessobject.TaxRegionCounty; import org.kuali.kfs.sys.businessobject.TaxRegionPostalCode; import org.kuali.kfs.sys.businessobject.TaxRegionState; import org.kuali.kfs.sys.service.TaxRegionService; import org.kuali.rice.kns.bo.PostalCode; import org.kuali.rice.kns.service.BusinessObjectService; import org.kuali.rice.kns.service.PostalCodeService; import org.kuali.rice.kns.util.ObjectUtils; import org.springframework.transaction.annotation.Transactional; @Transactional public class TaxRegionServiceImpl implements TaxRegionService { private BusinessObjectService businessObjectService; private PostalCodeService postalCodeService; /** * @see org.kuali.kfs.sys.service.TaxRegionService#getSalesTaxRegions(java.lang.String) */ public List<TaxRegion> getSalesTaxRegions(String postalCode) { List<TaxRegion> salesTaxRegions = new ArrayList<TaxRegion>(); PostalCode postalCodeObj = postalCodeService.getByPrimaryId(postalCode); if(ObjectUtils.isNotNull(postalCodeObj)) { salesTaxRegions.addAll(getPostalCodeTaxRegions(postalCodeObj.getPostalCode(), postalCodeObj.getPostalCountryCode(), false)); salesTaxRegions.addAll(getStateTaxRegions(postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), false)); salesTaxRegions.addAll(getCountyTaxRegions(postalCodeObj.getCountyCode(), postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), false)); } return salesTaxRegions; } /** * @see org.kuali.kfs.sys.service.TaxRegionService#getUseTaxRegions(java.lang.String) */ public List<TaxRegion> getUseTaxRegions(String postalCode) { List<TaxRegion> useTaxRegions = new ArrayList<TaxRegion>(); PostalCode postalCodeObj = postalCodeService.getByPrimaryId(postalCode); useTaxRegions.addAll(getPostalCodeTaxRegions(postalCodeObj.getPostalCode(), postalCodeObj.getPostalCountryCode(), true)); useTaxRegions.addAll(getStateTaxRegions(postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), true)); useTaxRegions.addAll(getCountyTaxRegions(postalCodeObj.getCountyCode(), postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), true)); return useTaxRegions; } /** * This method returns a list of tax regions that match postal code and country code. 
* * @param postalCode postal code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getPostalCodeTaxRegions(String postalCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> postalCodeTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(postalCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("postalCode", postalCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionPostalCode> taxRegionPostalCodes = (List<TaxRegionPostalCode>) businessObjectService.findMatching(TaxRegionPostalCode.class, criteria); for (TaxRegionPostalCode taxRegionPostalCode : taxRegionPostalCodes) { postalCodeTaxRegions.add(taxRegionPostalCode.getTaxRegion()); } } return postalCodeTaxRegions; } /** * This method returns a list of tax regions that match state code and country code. * * @param stateCode state code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getStateTaxRegions(String stateCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> stateTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(stateCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("stateCode", stateCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionState> taxRegionStates = (List<TaxRegionState>) businessObjectService.findMatching(TaxRegionState.class, criteria); for (TaxRegionState taxRegionState : taxRegionStates) { stateTaxRegions.add(taxRegionState.getTaxRegion()); } } return stateTaxRegions; } /** * This method returns a list of tax regions that match county code, state code, and country code * @param countyCode county code * @param stateCode state code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getCountyTaxRegions(String countyCode, String stateCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> countyTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(countyCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("countyCode", countyCode); criteria.put("stateCode", stateCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionCounty> taxRegionCounties = (List<TaxRegionCounty>) businessObjectService.findMatching(TaxRegionCounty.class, criteria); for (TaxRegionCounty taxRegionCounty : taxRegionCounties) { countyTaxRegions.add(taxRegionCounty.getTaxRegion()); } } return countyTaxRegions; } public BusinessObjectService getBusinessObjectService() { return businessObjectService; } public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public PostalCodeService getPostalCodeService() { return postalCodeService; } public void setPostalCodeService(PostalCodeService postalCodeService) { this.postalCodeService = postalCodeService; } }
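A hedged usage sketch of the service above follows; the SpringContext lookup and the sample postal code are assumptions made for illustration and are not part of this commit.

// Illustrative caller (the SpringContext wiring is an assumption, not shown in this file).
TaxRegionService taxRegionService = SpringContext.getBean(TaxRegionService.class);
List<TaxRegion> salesRegions = taxRegionService.getSalesTaxRegions("85721"); // sample postal code
// With the ObjectUtils.isNotNull guard added in getSalesTaxRegions, an unknown postal code now
// yields an empty list instead of a NullPointerException; getUseTaxRegions still dereferences
// the resolved postal code object directly.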
work/src/org/kuali/kfs/sys/service/impl/TaxRegionServiceImpl.java
/* * Copyright 2008 The Kuali Foundation. * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.sys.service.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.kuali.rice.kns.bo.PostalCode; import org.kuali.kfs.sys.businessobject.TaxRegion; import org.kuali.kfs.sys.businessobject.TaxRegionCounty; import org.kuali.kfs.sys.businessobject.TaxRegionPostalCode; import org.kuali.kfs.sys.businessobject.TaxRegionState; import org.kuali.rice.kns.service.PostalCodeService; import org.kuali.kfs.sys.service.TaxRegionService; import org.kuali.rice.kns.service.BusinessObjectService; import org.springframework.transaction.annotation.Transactional; @Transactional public class TaxRegionServiceImpl implements TaxRegionService { private BusinessObjectService businessObjectService; private PostalCodeService postalCodeService; /** * @see org.kuali.kfs.sys.service.TaxRegionService#getSalesTaxRegions(java.lang.String) */ public List<TaxRegion> getSalesTaxRegions(String postalCode) { List<TaxRegion> salesTaxRegions = new ArrayList<TaxRegion>(); PostalCode postalCodeObj = postalCodeService.getByPrimaryId(postalCode); salesTaxRegions.addAll(getPostalCodeTaxRegions(postalCodeObj.getPostalCode(), postalCodeObj.getPostalCountryCode(), false)); salesTaxRegions.addAll(getStateTaxRegions(postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), false)); salesTaxRegions.addAll(getCountyTaxRegions(postalCodeObj.getCountyCode(), postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), false)); return salesTaxRegions; } /** * @see org.kuali.kfs.sys.service.TaxRegionService#getUseTaxRegions(java.lang.String) */ public List<TaxRegion> getUseTaxRegions(String postalCode) { List<TaxRegion> useTaxRegions = new ArrayList<TaxRegion>(); PostalCode postalCodeObj = postalCodeService.getByPrimaryId(postalCode); useTaxRegions.addAll(getPostalCodeTaxRegions(postalCodeObj.getPostalCode(), postalCodeObj.getPostalCountryCode(), true)); useTaxRegions.addAll(getStateTaxRegions(postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), true)); useTaxRegions.addAll(getCountyTaxRegions(postalCodeObj.getCountyCode(), postalCodeObj.getPostalStateCode(), postalCodeObj.getPostalCountryCode(), true)); return useTaxRegions; } /** * This method returns a list of tax regions that match postal code and country code. 
* * @param postalCode postal code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getPostalCodeTaxRegions(String postalCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> postalCodeTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(postalCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("postalCode", postalCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionPostalCode> taxRegionPostalCodes = (List<TaxRegionPostalCode>) businessObjectService.findMatching(TaxRegionPostalCode.class, criteria); for (TaxRegionPostalCode taxRegionPostalCode : taxRegionPostalCodes) { postalCodeTaxRegions.add(taxRegionPostalCode.getTaxRegion()); } } return postalCodeTaxRegions; } /** * This method returns a list of tax regions that match state code and country code. * * @param stateCode state code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getStateTaxRegions(String stateCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> stateTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(stateCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("stateCode", stateCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionState> taxRegionStates = (List<TaxRegionState>) businessObjectService.findMatching(TaxRegionState.class, criteria); for (TaxRegionState taxRegionState : taxRegionStates) { stateTaxRegions.add(taxRegionState.getTaxRegion()); } } return stateTaxRegions; } /** * This method returns a list of tax regions that match county code, state code, and country code * @param countyCode county code * @param stateCode state code * @param postalCountryCode country code * @param useTaxOnly determines if only (use tax = true) tax regions are returned * @return */ protected List<TaxRegion> getCountyTaxRegions(String countyCode, String stateCode, String postalCountryCode, boolean useTaxOnly) { List<TaxRegion> countyTaxRegions = new ArrayList<TaxRegion>(); if (StringUtils.isNotEmpty(countyCode)) { Map<String, Object> criteria = new HashMap<String, Object>(); criteria.put("countyCode", countyCode); criteria.put("stateCode", stateCode); criteria.put("postalCountryCode", postalCountryCode); criteria.put("active", true); if (useTaxOnly) { criteria.put("taxRegion.taxRegionUseTaxIndicator", useTaxOnly); } List<TaxRegionCounty> taxRegionCounties = (List<TaxRegionCounty>) businessObjectService.findMatching(TaxRegionCounty.class, criteria); for (TaxRegionCounty taxRegionCounty : taxRegionCounties) { countyTaxRegions.add(taxRegionCounty.getTaxRegion()); } } return countyTaxRegions; } public BusinessObjectService getBusinessObjectService() { return businessObjectService; } public void setBusinessObjectService(BusinessObjectService businessObjectService) { this.businessObjectService = businessObjectService; } public PostalCodeService getPostalCodeService() { return postalCodeService; } public void setPostalCodeService(PostalCodeService postalCodeService) { this.postalCodeService = postalCodeService; } }
Fixes KFSMI-2136
work/src/org/kuali/kfs/sys/service/impl/TaxRegionServiceImpl.java
Fixes KFSMI-2136
Java
agpl-3.0
1d730239fc4f43d36f8b5b87383bf86d0f33fbf2
0
ianopolous/Peergos,ianopolous/Peergos,Peergos/Peergos,Peergos/Peergos,Peergos/Peergos,ianopolous/Peergos
package peergos.server; import peergos.server.cli.CLI; import peergos.server.space.*; import peergos.server.storage.admin.*; import peergos.shared.*; import peergos.server.corenode.*; import peergos.server.fuse.*; import peergos.server.mutable.*; import peergos.server.storage.*; import peergos.server.util.*; import peergos.shared.cbor.*; import peergos.shared.corenode.*; import peergos.shared.crypto.*; import peergos.shared.crypto.asymmetric.*; import peergos.shared.crypto.asymmetric.curve25519.*; import peergos.shared.crypto.hash.*; import peergos.shared.crypto.password.*; import peergos.shared.io.ipfs.multiaddr.*; import peergos.shared.io.ipfs.cid.*; import peergos.shared.io.ipfs.multihash.*; import peergos.shared.mutable.*; import peergos.shared.social.*; import peergos.shared.storage.*; import peergos.shared.user.*; import peergos.shared.user.fs.*; import java.io.*; import java.net.*; import java.nio.file.*; import java.sql.*; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; public class Main { public static final String PEERGOS_PATH = "PEERGOS_PATH"; public static final Path DEFAULT_PEERGOS_DIR_PATH = Paths.get(System.getProperty("user.home"), ".peergos"); static { PublicSigningKey.addProvider(PublicSigningKey.Type.Ed25519, new Ed25519.Java()); } public static Command<Boolean> ENSURE_IPFS_INSTALLED = new Command<>("install-ipfs", "Download/update IPFS binary. Does nothing if current IPFS binary is up-to-date.", args -> { Path ipfsExePath = IpfsWrapper.getIpfsExePath(args); File dir = ipfsExePath.getParent().toFile(); if (! dir.isDirectory() && ! dir.mkdirs()) throw new IllegalStateException("Specified install directory "+ dir +" doesn't exist and can't be created"); IpfsInstaller.ensureInstalled(ipfsExePath); List<IpfsInstaller.Plugin> plugins = IpfsInstaller.Plugin.parseAll(args); Path ipfsDir = IpfsWrapper.getIpfsDir(args); if (! plugins.isEmpty()) if (! ipfsDir.toFile().exists() && ! ipfsDir.toFile().mkdirs()) throw new IllegalStateException("Couldn't create ipfs dir: " + ipfsDir); for (IpfsInstaller.Plugin plugin : plugins) { plugin.ensureInstalled(ipfsDir); } return true; }, Arrays.asList( new Command.Arg("ipfs-exe-path", "Desired path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false), new Command.Arg("ipfs-plugins", "comma separated list of ipfs plugins to install, currently only go-ds-s3 is supported", false), new Command.Arg("s3.path", "Path of data store in S3", false, "blocks"), new Command.Arg("s3.bucket", "S3 bucket name", false), new Command.Arg("s3.region", "S3 region", false, "us-east-1"), new Command.Arg("s3.accessKey", "S3 access key", false, ""), new Command.Arg("s3.secretKey", "S3 secret key", false, ""), new Command.Arg("s3.region.endpoint", "Base url for S3 service", false) ) ); public static Command<IpfsWrapper> IPFS = new Command<>("ipfs", "Start IPFS daemon and ensure configuration, optionally manage runtime.", Main::startIpfs, Arrays.asList( new Command.Arg("IPFS_PATH", "Path to IPFS directory. Defaults to $PEERGOS_PATH/.ipfs, or ~/.peergos/.ipfs", false), new Command.Arg("ipfs-exe-path", "Path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false), new Command.Arg("ipfs-config-api-port", "IPFS API port", false, "5001"), new Command.Arg("ipfs-config-gateway-port", "IPFS Gateway port", false, "8080"), new Command.Arg("ipfs-config-swarm-port", "IPFS Swarm port", false, "4001"), new Command.Arg("ipfs-config-bootstrap-node-list", "Comma separated list of IPFS bootstrap nodes. 
Uses existing bootstrap nodes by default.", false), new Command.Arg("ipfs-manage-runtime", "Will manage the IPFS daemon runtime when set (restart on exit)", false, "true") ) ); public static final Command<UserService> PEERGOS = new Command<>("daemon", "The user facing Peergos server", Main::startPeergos, Stream.of( new Command.Arg("port", "service port", false, "8000"), new Command.Arg("peergos.identity.hash", "The hash of peergos user's public key, this is used to bootstrap the pki", true, "z59vuwzfFDp3ZA8ZpnnmHEuMtyA1q34m3Th49DYXQVJntWpxdGrRqXi"), new Command.Arg("pki-node-id", "Ipfs node id of the pki node", true, "QmVdFZgHnEgcedCS2G2ZNiEN59LuVrnRm7z3yXtEBv2XiF"), new Command.Arg("pki.node.ipaddress", "IP address of the pki node", true, "172.104.157.121"), new Command.Arg("pki.node.swarm.port", "Swarm port of the pki node", true, "5001"), new Command.Arg("domain", "Domain name to bind to,", false, "localhost"), new Command.Arg("max-users", "The maximum number of local users", false, "1"), new Command.Arg("useIPFS", "Use IPFS for storage or a local disk store", false, "true"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers datastore", true, "mutable.sql"), new Command.Arg("social-sql-file", "The filename for the follow requests datastore", true, "social.sql"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("webroot", "the path to the directory to serve as the web root", false), new Command.Arg("default-quota", "default maximum storage per user", false, Long.toString(1024L * 1024 * 1024)), new Command.Arg("mirror.node.id", "Mirror a server's data locally", false), new Command.Arg("mirror.username", "Mirror a user's data locally", false), new Command.Arg("collect-metrics", "Export aggregated metrics", false, "false"), new Command.Arg("metrics.port", "Port for serving aggregated metrics", false, "8001") ).collect(Collectors.toList()) ); private static final void bootstrap(Args args) { try { // This means creating a pki keypair and publishing the public key Crypto crypto = Crypto.initJava(); // setup peergos user and pki keys String peergosPassword = args.getArg("peergos.password"); String pkiUsername = "peergos"; UserWithRoot peergos = UserUtil.generateUser(pkiUsername, peergosPassword, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get(); boolean useIPFS = args.getBoolean("useIPFS"); String ipfsApiAddress = args.getArg("ipfs-api-address", "/ip4/127.0.0.1/tcp/5001"); ContentAddressedStorage dht = useIPFS ? 
new IpfsDHT(new MultiAddress(ipfsApiAddress)) : new FileContentAddressedStorage(blockstorePath(args)); SigningKeyPair peergosIdentityKeys = peergos.getUser(); PublicKeyHash peergosPublicHash = ContentAddressedStorage.hashKey(peergosIdentityKeys.publicSigningKey); String pkiPassword = args.getArg("pki.keygen.password"); if (peergosPassword.equals(pkiPassword)) throw new IllegalStateException("Pki password and peergos password must be different!!"); SigningKeyPair pkiKeys = UserUtil.generateUser(pkiUsername, pkiPassword, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get().getUser(); IpfsTransaction.call(peergosPublicHash, tid -> dht.putSigningKey(peergosIdentityKeys.secretSigningKey.signatureOnly( pkiKeys.publicSigningKey.serialize()), peergosPublicHash, pkiKeys.publicSigningKey, tid), dht).get(); String pkiKeyfilePassword = args.getArg("pki.keyfile.password"); Cborable cipherTextCbor = PasswordProtected.encryptWithPassword(pkiKeys.secretSigningKey.toCbor().toByteArray(), pkiKeyfilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random); Files.write(args.fromPeergosDir("pki.secret.key.path"), cipherTextCbor.serialize()); Files.write(args.fromPeergosDir("pki.public.key.path"), pkiKeys.publicSigningKey.toCbor().toByteArray()); args.setIfAbsent("peergos.identity.hash", peergosPublicHash.toString()); System.out.println("Peergos user identity hash: " + peergosPublicHash); } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static final void poststrap(Args args) { try { // The final step of bootstrapping a new peergos network, which must be run once after network bootstrap // This means signing up the peergos user, and adding the pki public key to the peergos user Crypto crypto = Crypto.initJava(); // recreate peergos user and pki keys String password = args.getArg("peergos.password"); String pkiUsername = "peergos"; UserWithRoot peergos = UserUtil.generateUser(pkiUsername, password, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get(); SigningKeyPair peergosIdentityKeys = peergos.getUser(); PublicKeyHash peergosPublicHash = ContentAddressedStorage.hashKey(peergosIdentityKeys.publicSigningKey); PublicSigningKey pkiPublic = PublicSigningKey.fromByteArray( Files.readAllBytes(args.fromPeergosDir("pki.public.key.path"))); PublicKeyHash pkiPublicHash = ContentAddressedStorage.hashKey(pkiPublic); int webPort = args.getInt("port"); NetworkAccess network = NetworkAccess.buildJava(new URL("http://localhost:" + webPort)).get(); String pkiFilePassword = args.getArg("pki.keyfile.password"); SecretSigningKey pkiSecret = SecretSigningKey.fromCbor(CborObject.fromByteArray(PasswordProtected.decryptWithPassword( CborObject.fromByteArray(Files.readAllBytes(args.fromPeergosDir("pki.secret.key.path"))), pkiFilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random))); // sign up peergos user SecretGenerationAlgorithm algorithm = SecretGenerationAlgorithm.getDefaultWithoutExtraSalt(); UserContext context = UserContext.signUpGeneral(pkiUsername, password, network, crypto, algorithm, x -> {}).get(); Optional<PublicKeyHash> existingPkiKey = context.getNamedKey("pki").get(); if (!existingPkiKey.isPresent() || existingPkiKey.get().equals(pkiPublicHash)) { SigningPrivateKeyAndPublicHash pkiKeyPair = new SigningPrivateKeyAndPublicHash(pkiPublicHash, pkiSecret); // write pki public key to ipfs 
IpfsTransaction.call(peergosPublicHash, tid -> network.dhtClient.putSigningKey(peergosIdentityKeys.secretSigningKey .signatureOnly(pkiPublic.serialize()), peergosPublicHash, pkiPublic, tid), network.dhtClient).get(); context.addNamedOwnedKeyAndCommit("pki", pkiKeyPair).join(); } // Create /peergos/releases and make it public Optional<FileWrapper> releaseDir = context.getByPath(Paths.get(pkiUsername, "releases")).join(); if (! releaseDir.isPresent()) { context.getUserRoot().join().mkdir("releases", network, false, crypto).join(); FileWrapper releases = context.getByPath(Paths.get(pkiUsername, "releases")).join().get(); context.makePublic(releases).join(); } } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static final Command<UserService> PKI_INIT = new Command<>("pki-init", "Bootstrap and start the Peergos PKI Server", args -> { try { int peergosPort = args.getInt("port"); int ipfsApiPort = args.getInt("ipfs-config-api-port"); args.setIfAbsent("proxy-target", getLocalMultiAddress(peergosPort).toString()); IpfsWrapper ipfs = null; boolean useIPFS = args.getBoolean("useIPFS"); if (useIPFS) { ENSURE_IPFS_INSTALLED.main(args); ipfs = startIpfs(args); } args.setArg("ipfs-api-address", getLocalMultiAddress(ipfsApiPort).toString()); bootstrap(args); Multihash pkiIpfsNodeId = useIPFS ? new IpfsDHT(getLocalMultiAddress(ipfsApiPort)).id().get() : new FileContentAddressedStorage(blockstorePath(args)).id().get(); if (ipfs != null) ipfs.stop(); args.setIfAbsent("pki-node-id", pkiIpfsNodeId.toBase58()); UserService daemon = PEERGOS.main(args); poststrap(args); return daemon; } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } }, Arrays.asList( new Command.Arg("domain", "The hostname to listen on", true, "localhost"), new Command.Arg("port", "The port for the local non tls server to listen on", true, "8000"), new Command.Arg("useIPFS", "Whether to use IPFS or a local datastore", true, "false"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers (or :memory: or ram based)", true, ":memory:"), new Command.Arg("social-sql-file", "The filename for the follow requests (or :memory: or ram based)", true, ":memory:"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("ipfs-config-api-port", "ipfs api port", true, "5001"), new Command.Arg("ipfs-config-gateway-port", "ipfs gateway port", true, "8080"), new Command.Arg("pki.secret.key.path", "The path to the pki secret key file", true, "test.pki.secret.key"), new Command.Arg("pki.public.key.path", "The path to the pki public key file", true, "test.pki.public.key"), // Secret parameters new Command.Arg("peergos.password", "The password for the 'peergos' user", true), new Command.Arg("pki.keygen.password", "The password to generate the pki key from", true), new Command.Arg("pki.keyfile.password", "The password protecting the pki keyfile", true) ) ); public static final Command<UserService> PKI = new Command<>("pki", "Start the Peergos PKI Server that has already been bootstrapped", args -> { try { int peergosPort = args.getInt("port"); int ipfsApiPort = args.getInt("ipfs-config-api-port"); args.setIfAbsent("proxy-target", getLocalMultiAddress(peergosPort).toString()); IpfsWrapper ipfs = null; boolean useIPFS = args.getBoolean("useIPFS"); if (useIPFS) { ENSURE_IPFS_INSTALLED.main(args); ipfs = startIpfs(args); } args.setArg("ipfs-api-address", getLocalMultiAddress(ipfsApiPort).toString()); Multihash 
pkiIpfsNodeId = useIPFS ? new IpfsDHT(getLocalMultiAddress(ipfsApiPort)).id().get() : new FileContentAddressedStorage(blockstorePath(args)).id().get(); if (ipfs != null) ipfs.stop(); args.setIfAbsent("pki-node-id", pkiIpfsNodeId.toBase58()); return PEERGOS.main(args); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } }, Arrays.asList( new Command.Arg("peergos.identity.hash", "The hostname to listen on", true), new Command.Arg("domain", "The hostname to listen on", true, "localhost"), new Command.Arg("port", "The port for the local non tls server to listen on", true, "8000"), new Command.Arg("useIPFS", "Whether to use IPFS or a local datastore", true, "false"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers (or :memory: or ram based)", true, ":memory:"), new Command.Arg("social-sql-file", "The filename for the follow requests (or :memory: or ram based)", true, ":memory:"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("ipfs-config-api-port", "ipfs api port", true, "5001"), new Command.Arg("ipfs-config-gateway-port", "ipfs gateway port", true, "8080"), new Command.Arg("pki.secret.key.path", "The path to the pki secret key file", true, "test.pki.secret.key"), new Command.Arg("pki.public.key.path", "The path to the pki public key file", true, "test.pki.public.key"), // Secret parameters new Command.Arg("pki.keyfile.password", "The password protecting the pki keyfile", true) ) ); public static final Command<FuseProcess> FUSE = new Command<>("fuse", "Mount a Peergos user's filesystem natively", Main::startFuse, Stream.of( new Command.Arg("username", "Peergos username", true), new Command.Arg("password", "Peergos password", true), new Command.Arg("webport", "Peergos service address port", false, "8000"), new Command.Arg("mountPoint", "The directory to mount the Peergos filesystem in", true, "peergos") ).collect(Collectors.toList()) ); public static final Command<Boolean> SHELL = new Command<>("shell", "An interactive command-line-interface to a Peergos server.", Main::startShell, Collections.emptyList() ); public static UserService startPeergos(Args a) { try { Crypto crypto = Crypto.initJava(); int webPort = a.getInt("port"); MultiAddress localPeergosApi = getLocalMultiAddress(webPort); a.setIfAbsent("proxy-target", localPeergosApi.toString()); boolean useIPFS = a.getBoolean("useIPFS"); IpfsWrapper ipfsWrapper = null; if (useIPFS) { ENSURE_IPFS_INSTALLED.main(a); ipfsWrapper = IPFS.main(a); } boolean doExportAggregatedMetrics = a.getBoolean("metrics.do_export"); if (doExportAggregatedMetrics) { int exporterPort = a.getInt("metrics.exporter_port"); AggregatedMetrics.startExporter(exporterPort); } Multihash pkiServerNodeId = Cid.decode(a.getArg("pki-node-id")); URL ipfsApiAddress = AddressUtil.getLocalAddress(a.getInt("ipfs-config-api-port")); URL ipfsGatewayAddress = AddressUtil.getLocalAddress(a.getInt("ipfs-config-gateway-port")); String domain = a.getArg("domain"); InetSocketAddress userAPIAddress = new InetSocketAddress(domain, webPort); int dhtCacheEntries = 1000; int maxValueSizeToCache = 50 * 1024; JavaPoster ipfsApi = new JavaPoster(ipfsApiAddress); JavaPoster ipfsGateway = new JavaPoster(ipfsGatewayAddress); ContentAddressedStorage localDht; if (useIPFS) { boolean enableGC = a.getBoolean("enable-gc", true); ContentAddressedStorage.HTTP ipfs = new ContentAddressedStorage.HTTP(ipfsApi, false); if (enableGC) { GarbageCollector gced = new 
GarbageCollector(ipfs, a.getInt("gc.period.millis", 60 * 60 * 1000)); gced.start(); localDht = new CachingStorage(gced, dhtCacheEntries, maxValueSizeToCache); } else localDht = new CachingStorage(ipfs, dhtCacheEntries, maxValueSizeToCache); } else localDht = new FileContentAddressedStorage(blockstorePath(a)); String hostname = a.getArg("domain"); Multihash nodeId = localDht.id().get(); boolean usePostgres = a.getBoolean("use-postgres", false); JdbcIpnsAndSocial.SqlSupplier sqlCommands = usePostgres ? new JdbcIpnsAndSocial.PostgresCommands() : new JdbcIpnsAndSocial.SqliteCommands(); Connection database; if (usePostgres) { String postgresHost = a.getArg("postgres.host"); int postgresPort = a.getInt("postgres.port", 5432); String databaseName = a.getArg("postgres.database", "peergos"); String postgresUsername = a.getArg("postgres.username"); String postgresPassword = a.getArg("postgres.password"); database = Postgres.build(postgresHost, postgresPort, databaseName, postgresUsername, postgresPassword); } else { database = Sqlite.build(Sqlite.getDbPath(a, "mutable-pointers-file")); } JdbcIpnsAndSocial rawPointers = new JdbcIpnsAndSocial(database, sqlCommands); MutablePointers localPointers = UserRepository.build(localDht, rawPointers); MutablePointersProxy proxingMutable = new HttpMutablePointers(ipfsGateway, pkiServerNodeId); PublicKeyHash peergosId = PublicKeyHash.fromString(a.getArg("peergos.identity.hash")); // build a mirroring proxying corenode, unless we are the pki node boolean isPkiNode = nodeId.equals(pkiServerNodeId); CoreNode core = isPkiNode ? buildPkiCorenode(new PinningMutablePointers(localPointers, localDht), localDht, a) : new MirrorCoreNode(new HTTPCoreNode(ipfsGateway, pkiServerNodeId), proxingMutable, localDht, peergosId, a.fromPeergosDir("pki-mirror-state-path","pki-state.cbor")); long defaultQuota = a.getLong("default-quota"); long maxUsers = a.getLong("max-users"); Logging.LOG().info("Using default user space quota of " + defaultQuota); Path quotaFilePath = a.fromPeergosDir("quotas_file","quotas.txt"); Path statePath = a.fromPeergosDir("state_path","usage-state.cbor"); Connection spaceDb = usePostgres ? 
database : Sqlite.build(Sqlite.getDbPath(a, "space-requests-sql-file")); JdbcSpaceRequests spaceRequests = JdbcSpaceRequests.build(spaceDb, sqlCommands); UserQuotas userQuotas = new UserQuotas(quotaFilePath, defaultQuota, maxUsers); CoreNode signupFilter = new SignUpFilter(core, userQuotas, nodeId); SpaceCheckingKeyFilter spaceChecker = new SpaceCheckingKeyFilter(core, localPointers, localDht, userQuotas, spaceRequests, statePath); CorenodeEventPropagator corePropagator = new CorenodeEventPropagator(signupFilter); corePropagator.addListener(spaceChecker::accept); MutableEventPropagator localMutable = new MutableEventPropagator(localPointers); localMutable.addListener(spaceChecker::accept); ContentAddressedStorage filteringDht = new WriteFilter(localDht, spaceChecker::allowWrite); ContentAddressedStorageProxy proxingDht = new ContentAddressedStorageProxy.HTTP(ipfsGateway); ContentAddressedStorage p2pDht = new ContentAddressedStorage.Proxying(filteringDht, proxingDht, nodeId, core); Path blacklistPath = a.fromPeergosDir("blacklist_file", "blacklist.txt"); PublicKeyBlackList blacklist = new UserBasedBlacklist(blacklistPath, core, localMutable, p2pDht); MutablePointers blockingMutablePointers = new BlockingMutablePointers(new PinningMutablePointers(localMutable, p2pDht), blacklist); MutablePointers p2mMutable = new ProxyingMutablePointers(nodeId, core, blockingMutablePointers, proxingMutable); SocialNetworkProxy httpSocial = new HttpSocialNetwork(ipfsGateway, ipfsGateway); Connection socialDatabase = usePostgres ? database : Sqlite.build(Sqlite.getDbPath(a, "social-sql-file")); JdbcIpnsAndSocial rawSocial = new JdbcIpnsAndSocial(socialDatabase, sqlCommands); SocialNetwork local = UserRepository.build(p2pDht, rawSocial); SocialNetwork p2pSocial = new ProxyingSocialNetwork(nodeId, core, local, httpSocial); Path userPath = a.fromPeergosDir("whitelist_file", "user_whitelist.txt"); int delayMs = a.getInt("whitelist_sleep_period", 1000 * 60 * 10); new UserFilePinner(userPath, core, p2mMutable, p2pDht, delayMs).start(); Set<String> adminUsernames = Arrays.asList(a.getArg("admin-usernames").split(",")) .stream() .collect(Collectors.toSet()); Admin storageAdmin = new Admin(adminUsernames, spaceRequests, userQuotas, core, localDht); HttpSpaceUsage httpSpaceUsage = new HttpSpaceUsage(ipfsGateway, ipfsGateway); ProxyingSpaceUsage p2pSpaceUsage = new ProxyingSpaceUsage(nodeId, corePropagator, spaceChecker, httpSpaceUsage); UserService peergos = new UserService(p2pDht, crypto, corePropagator, p2pSocial, p2mMutable, storageAdmin, p2pSpaceUsage); InetSocketAddress localAddress = new InetSocketAddress("localhost", userAPIAddress.getPort()); Optional<Path> webroot = a.hasArg("webroot") ? Optional.of(Paths.get(a.getArg("webroot"))) : Optional.empty(); boolean useWebAssetCache = a.getBoolean("webcache", true); Optional<String> tlsHostname = hostname.equals("localhost") ? Optional.empty() : Optional.of(hostname); Optional<UserService.TlsProperties> tlsProps = tlsHostname.map(host -> new UserService.TlsProperties(host, a.getArg("tls.keyfile.password"))); peergos.initAndStart(localAddress, tlsProps, webroot, useWebAssetCache); if (! 
isPkiNode) { int pkiNodeSwarmPort = a.getInt("pki.node.swarm.port"); InetAddress pkiNodeIpAddress = InetAddress.getByName(a.getArg("pki.node.ipaddress")); ipfsWrapper.connectToNode(new InetSocketAddress(pkiNodeIpAddress, pkiNodeSwarmPort), pkiServerNodeId); ((MirrorCoreNode) core).start(); } spaceChecker.calculateUsage(); if (a.hasArg("mirror.node.id")) { Multihash nodeToMirrorId = Cid.decode(a.getArg("mirror.node.id")); NetworkAccess localApi = NetworkAccess.buildJava(webPort).join(); new Thread(() -> { while (true) { try { Mirror.mirrorNode(nodeToMirrorId, localApi, rawPointers, localDht); try { Thread.sleep(60_000); } catch (InterruptedException f) {} } catch (Exception e) { e.printStackTrace(); try { Thread.sleep(5_000); } catch (InterruptedException f) {} } } }).start(); } if (a.hasArg("mirror.username")) { NetworkAccess localApi = NetworkAccess.buildJava(webPort).join(); new Thread(() -> { while (true) { try { Mirror.mirrorUser(a.getArg("mirror.username"), localApi, rawPointers, localDht); try { Thread.sleep(60_000); } catch (InterruptedException f) {} } catch (Exception e) { e.printStackTrace(); try { Thread.sleep(5_000); } catch (InterruptedException f) {} } } }).start(); } return peergos; } catch (Exception e) { throw new RuntimeException(e); } } public static FuseProcess startFuse(Args a) { String username = a.getArg("username"); String password = a.getArg("password"); int webPort = a.getInt("webport"); try { Files.createTempDirectory("peergos").toString(); } catch (IOException ioe) { throw new IllegalStateException(ioe); } String mountPath = a.getArg("mountPoint"); Path path = Paths.get(mountPath); path.toFile().mkdirs(); System.out.println("\n\nPeergos mounted at " + path + "\n\n"); try { NetworkAccess network = NetworkAccess.buildJava(webPort).get(); Crypto crypto = Crypto.initJava(); UserContext userContext = PeergosNetworkUtils.ensureSignedUp(username, password, network, crypto); PeergosFS peergosFS = new PeergosFS(userContext); FuseProcess fuseProcess = new FuseProcess(peergosFS, path); Runtime.getRuntime().addShutdownHook(new Thread(() -> fuseProcess.close(), "Fuse shutdown")); fuseProcess.start(); return fuseProcess; } catch (Exception ex) { throw new IllegalStateException(ex); } } public static IpfsWrapper startIpfs(Args a) { // test if ipfs is already running int ipfsApiPort = IpfsWrapper.getApiPort(a); if (IpfsWrapper.isHttpApiListening(ipfsApiPort)) { throw new IllegalStateException("IPFS is already running on api port " + ipfsApiPort); } IpfsWrapper ipfs = IpfsWrapper.build(a); if (a.getBoolean("ipfs-manage-runtime", true)) IpfsWrapper.launchAndManage(ipfs); else { IpfsWrapper.launchOnce(ipfs); } // wait for daemon to finish starting ipfs.waitForDaemon(10); return ipfs; } public static Boolean startShell(Args args) { CLI.main(new String[]{}); return true; } private static CoreNode buildPkiCorenode(MutablePointers mutable, ContentAddressedStorage dht, Args a) { try { Crypto crypto = Crypto.initJava(); PublicKeyHash peergosIdentity = PublicKeyHash.fromString(a.getArg("peergos.identity.hash")); String pkiSecretKeyfilePassword = a.getArg("pki.keyfile.password"); PublicSigningKey pkiPublic = PublicSigningKey.fromByteArray( Files.readAllBytes(a.fromPeergosDir("pki.public.key.path"))); SecretSigningKey pkiSecretKey = SecretSigningKey.fromCbor(CborObject.fromByteArray( PasswordProtected.decryptWithPassword( CborObject.fromByteArray(Files.readAllBytes(a.fromPeergosDir("pki.secret.key.path"))), pkiSecretKeyfilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random 
))); SigningKeyPair pkiKeys = new SigningKeyPair(pkiPublic, pkiSecretKey); PublicKeyHash pkiPublicHash = ContentAddressedStorage.hashKey(pkiKeys.publicSigningKey); MaybeMultihash currentPkiRoot = mutable.getPointerTarget(peergosIdentity, pkiPublicHash, dht).get(); SigningPrivateKeyAndPublicHash pkiSigner = new SigningPrivateKeyAndPublicHash(pkiPublicHash, pkiSecretKey); if (! currentPkiRoot.isPresent()) currentPkiRoot = IpfsTransaction.call(peergosIdentity, tid -> WriterData.createEmpty(peergosIdentity, pkiSigner, dht, tid).join() .commit(peergosIdentity, pkiSigner, MaybeMultihash.empty(), mutable, dht, tid) .thenApply(version -> version.get(pkiSigner).hash), dht).join(); return new IpfsCoreNode(pkiSigner, currentPkiRoot, dht, mutable, peergosIdentity); } catch (Exception e) { throw new RuntimeException(e); } } public static final Command<Void> MAIN = new Command<>("Main", "Run a Peergos command", args -> { System.out.println("Run with -help to show options"); return null; }, Collections.emptyList(), Arrays.asList( PKI_INIT, PKI, PEERGOS, FUSE, SHELL ) ); /** * Create path to local blockstore directory from Args. * * @param args * @return */ private static Path blockstorePath(Args args) { return args.fromPeergosDir("blockstore_dir", "blockstore"); } public static MultiAddress getLocalMultiAddress(int port) { return new MultiAddress("/ip4/127.0.0.1/tcp/" + port); } public static MultiAddress getLocalBootstrapAddress(int port, Multihash nodeId) { return new MultiAddress("/ip4/127.0.0.1/tcp/" + port + "/ipfs/"+ nodeId); } public static void main(String[] args) { MAIN.main(Args.parse(args)); } }
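The two multiaddress helpers at the end of Main are plain string builders; the short illustration below reuses the default pki-node-id declared earlier in this file, and the port values are arbitrary.

// Illustration of the multiaddress helpers defined above (ports are arbitrary examples).
MultiAddress api = Main.getLocalMultiAddress(5001);
// -> /ip4/127.0.0.1/tcp/5001
Multihash pkiNode = Cid.decode("QmVdFZgHnEgcedCS2G2ZNiEN59LuVrnRm7z3yXtEBv2XiF"); // default pki-node-id above
MultiAddress bootstrap = Main.getLocalBootstrapAddress(4001, pkiNode);
// -> /ip4/127.0.0.1/tcp/4001/ipfs/QmVdFZgHnEgcedCS2G2ZNiEN59LuVrnRm7z3yXtEBv2XiF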
src/peergos/server/Main.java
package peergos.server; import peergos.server.cli.CLI; import peergos.server.space.*; import peergos.server.storage.admin.*; import peergos.shared.*; import peergos.server.corenode.*; import peergos.server.fuse.*; import peergos.server.mutable.*; import peergos.server.storage.*; import peergos.server.util.*; import peergos.shared.cbor.*; import peergos.shared.corenode.*; import peergos.shared.crypto.*; import peergos.shared.crypto.asymmetric.*; import peergos.shared.crypto.asymmetric.curve25519.*; import peergos.shared.crypto.hash.*; import peergos.shared.crypto.password.*; import peergos.shared.io.ipfs.multiaddr.*; import peergos.shared.io.ipfs.cid.*; import peergos.shared.io.ipfs.multihash.*; import peergos.shared.mutable.*; import peergos.shared.social.*; import peergos.shared.storage.*; import peergos.shared.user.*; import peergos.shared.user.fs.*; import java.io.*; import java.net.*; import java.nio.file.*; import java.sql.*; import java.util.*; import java.util.stream.Collectors; import java.util.stream.Stream; public class Main { public static final String PEERGOS_PATH = "PEERGOS_PATH"; public static final Path DEFAULT_PEERGOS_DIR_PATH = Paths.get(System.getProperty("user.home"), ".peergos"); static { PublicSigningKey.addProvider(PublicSigningKey.Type.Ed25519, new Ed25519.Java()); } public static Command<Boolean> ENSURE_IPFS_INSTALLED = new Command<>("install-ipfs", "Download/update IPFS binary. Does nothing if current IPFS binary is up-to-date.", args -> { Path ipfsExePath = IpfsWrapper.getIpfsExePath(args); File dir = ipfsExePath.getParent().toFile(); if (! dir.isDirectory() && ! dir.mkdirs()) throw new IllegalStateException("Specified install directory "+ dir +" doesn't exist and can't be created"); IpfsInstaller.ensureInstalled(ipfsExePath); List<IpfsInstaller.Plugin> plugins = IpfsInstaller.Plugin.parseAll(args); Path ipfsDir = IpfsWrapper.getIpfsDir(args); if (! plugins.isEmpty()) if (! ipfsDir.toFile().exists() && ! ipfsDir.toFile().mkdirs()) throw new IllegalStateException("Couldn't create ipfs dir: " + ipfsDir); for (IpfsInstaller.Plugin plugin : plugins) { plugin.ensureInstalled(ipfsDir); } return true; }, Arrays.asList( new Command.Arg("ipfs-exe-path", "Desired path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false), new Command.Arg("ipfs-plugins", "comma separated list of ipfs plugins to install, currently only go-ds-s3 is supported", false), new Command.Arg("s3.path", "Path of data store in S3", false, "blocks"), new Command.Arg("s3.bucket", "S3 bucket name", false), new Command.Arg("s3.region", "S3 region", false, "us-east-1"), new Command.Arg("s3.accessKey", "S3 access key", false, ""), new Command.Arg("s3.secretKey", "S3 secret key", false, ""), new Command.Arg("s3.region.endpoint", "Base url for S3 service", false) ) ); public static Command<IpfsWrapper> IPFS = new Command<>("ipfs", "Start IPFS daemon and ensure configuration, optionally manage runtime.", Main::startIpfs, Arrays.asList( new Command.Arg("IPFS_PATH", "Path to IPFS directory. Defaults to $PEERGOS_PATH/.ipfs, or ~/.peergos/.ipfs", false), new Command.Arg("ipfs-exe-path", "Path to IPFS executable. Defaults to $PEERGOS_PATH/ipfs", false), new Command.Arg("ipfs-config-api-port", "IPFS API port", false, "5001"), new Command.Arg("ipfs-config-gateway-port", "IPFS Gateway port", false, "8080"), new Command.Arg("ipfs-config-swarm-port", "IPFS Swarm port", false, "4001"), new Command.Arg("ipfs-config-bootstrap-node-list", "Comma separated list of IPFS bootstrap nodes. 
Uses existing bootstrap nodes by default.", false), new Command.Arg("ipfs-manage-runtime", "Will manage the IPFS daemon runtime when set (restart on exit)", false, "true") ) ); public static final Command<UserService> PEERGOS = new Command<>("daemon", "The user facing Peergos server", Main::startPeergos, Stream.of( new Command.Arg("port", "service port", false, "8000"), new Command.Arg("peergos.identity.hash", "The hash of peergos user's public key, this is used to bootstrap the pki", true, "z59vuwzfFDp3ZA8ZpnnmHEuMtyA1q34m3Th49DYXQVJntWpxdGrRqXi"), new Command.Arg("pki-node-id", "Ipfs node id of the pki node", true, "QmVdFZgHnEgcedCS2G2ZNiEN59LuVrnRm7z3yXtEBv2XiF"), new Command.Arg("pki.node.ipaddress", "IP address of the pki node", true, "172.104.157.121"), new Command.Arg("pki.node.swarm.port", "Swarm port of the pki node", true, "5001"), new Command.Arg("domain", "Domain name to bind to,", false, "localhost"), new Command.Arg("max-users", "The maximum number of local users", false, "1"), new Command.Arg("useIPFS", "Use IPFS for storage or a local disk store", false, "true"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers datastore", true, "mutable.sql"), new Command.Arg("social-sql-file", "The filename for the follow requests datastore", true, "social.sql"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("webroot", "the path to the directory to serve as the web root", false), new Command.Arg("default-quota", "default maximum storage per user", false, Long.toString(1024L * 1024 * 1024)), new Command.Arg("mirror.node.id", "Mirror a server's data locally", false), new Command.Arg("mirror.username", "Mirror a user's data locally", false), new Command.Arg("metrics.do_export", "Export aggregated metrics.", false, "false"), new Command.Arg("metrics.exporter_port", "Port for serving aggregated metrics.", false, "8001") ).collect(Collectors.toList()) ); private static final void bootstrap(Args args) { try { // This means creating a pki keypair and publishing the public key Crypto crypto = Crypto.initJava(); // setup peergos user and pki keys String peergosPassword = args.getArg("peergos.password"); String pkiUsername = "peergos"; UserWithRoot peergos = UserUtil.generateUser(pkiUsername, peergosPassword, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get(); boolean useIPFS = args.getBoolean("useIPFS"); String ipfsApiAddress = args.getArg("ipfs-api-address", "/ip4/127.0.0.1/tcp/5001"); ContentAddressedStorage dht = useIPFS ? 
new IpfsDHT(new MultiAddress(ipfsApiAddress)) : new FileContentAddressedStorage(blockstorePath(args)); SigningKeyPair peergosIdentityKeys = peergos.getUser(); PublicKeyHash peergosPublicHash = ContentAddressedStorage.hashKey(peergosIdentityKeys.publicSigningKey); String pkiPassword = args.getArg("pki.keygen.password"); if (peergosPassword.equals(pkiPassword)) throw new IllegalStateException("Pki password and peergos password must be different!!"); SigningKeyPair pkiKeys = UserUtil.generateUser(pkiUsername, pkiPassword, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get().getUser(); IpfsTransaction.call(peergosPublicHash, tid -> dht.putSigningKey(peergosIdentityKeys.secretSigningKey.signatureOnly( pkiKeys.publicSigningKey.serialize()), peergosPublicHash, pkiKeys.publicSigningKey, tid), dht).get(); String pkiKeyfilePassword = args.getArg("pki.keyfile.password"); Cborable cipherTextCbor = PasswordProtected.encryptWithPassword(pkiKeys.secretSigningKey.toCbor().toByteArray(), pkiKeyfilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random); Files.write(args.fromPeergosDir("pki.secret.key.path"), cipherTextCbor.serialize()); Files.write(args.fromPeergosDir("pki.public.key.path"), pkiKeys.publicSigningKey.toCbor().toByteArray()); args.setIfAbsent("peergos.identity.hash", peergosPublicHash.toString()); System.out.println("Peergos user identity hash: " + peergosPublicHash); } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static final void poststrap(Args args) { try { // The final step of bootstrapping a new peergos network, which must be run once after network bootstrap // This means signing up the peergos user, and adding the pki public key to the peergos user Crypto crypto = Crypto.initJava(); // recreate peergos user and pki keys String password = args.getArg("peergos.password"); String pkiUsername = "peergos"; UserWithRoot peergos = UserUtil.generateUser(pkiUsername, password, crypto.hasher, crypto.symmetricProvider, crypto.random, crypto.signer, crypto.boxer, SecretGenerationAlgorithm.getDefaultWithoutExtraSalt()).get(); SigningKeyPair peergosIdentityKeys = peergos.getUser(); PublicKeyHash peergosPublicHash = ContentAddressedStorage.hashKey(peergosIdentityKeys.publicSigningKey); PublicSigningKey pkiPublic = PublicSigningKey.fromByteArray( Files.readAllBytes(args.fromPeergosDir("pki.public.key.path"))); PublicKeyHash pkiPublicHash = ContentAddressedStorage.hashKey(pkiPublic); int webPort = args.getInt("port"); NetworkAccess network = NetworkAccess.buildJava(new URL("http://localhost:" + webPort)).get(); String pkiFilePassword = args.getArg("pki.keyfile.password"); SecretSigningKey pkiSecret = SecretSigningKey.fromCbor(CborObject.fromByteArray(PasswordProtected.decryptWithPassword( CborObject.fromByteArray(Files.readAllBytes(args.fromPeergosDir("pki.secret.key.path"))), pkiFilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random))); // sign up peergos user SecretGenerationAlgorithm algorithm = SecretGenerationAlgorithm.getDefaultWithoutExtraSalt(); UserContext context = UserContext.signUpGeneral(pkiUsername, password, network, crypto, algorithm, x -> {}).get(); Optional<PublicKeyHash> existingPkiKey = context.getNamedKey("pki").get(); if (!existingPkiKey.isPresent() || existingPkiKey.get().equals(pkiPublicHash)) { SigningPrivateKeyAndPublicHash pkiKeyPair = new SigningPrivateKeyAndPublicHash(pkiPublicHash, pkiSecret); // write pki public key to ipfs 
IpfsTransaction.call(peergosPublicHash, tid -> network.dhtClient.putSigningKey(peergosIdentityKeys.secretSigningKey .signatureOnly(pkiPublic.serialize()), peergosPublicHash, pkiPublic, tid), network.dhtClient).get(); context.addNamedOwnedKeyAndCommit("pki", pkiKeyPair).join(); } // Create /peergos/releases and make it public Optional<FileWrapper> releaseDir = context.getByPath(Paths.get(pkiUsername, "releases")).join(); if (! releaseDir.isPresent()) { context.getUserRoot().join().mkdir("releases", network, false, crypto).join(); FileWrapper releases = context.getByPath(Paths.get(pkiUsername, "releases")).join().get(); context.makePublic(releases).join(); } } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static final Command<UserService> PKI_INIT = new Command<>("pki-init", "Bootstrap and start the Peergos PKI Server", args -> { try { int peergosPort = args.getInt("port"); int ipfsApiPort = args.getInt("ipfs-config-api-port"); args.setIfAbsent("proxy-target", getLocalMultiAddress(peergosPort).toString()); IpfsWrapper ipfs = null; boolean useIPFS = args.getBoolean("useIPFS"); if (useIPFS) { ENSURE_IPFS_INSTALLED.main(args); ipfs = startIpfs(args); } args.setArg("ipfs-api-address", getLocalMultiAddress(ipfsApiPort).toString()); bootstrap(args); Multihash pkiIpfsNodeId = useIPFS ? new IpfsDHT(getLocalMultiAddress(ipfsApiPort)).id().get() : new FileContentAddressedStorage(blockstorePath(args)).id().get(); if (ipfs != null) ipfs.stop(); args.setIfAbsent("pki-node-id", pkiIpfsNodeId.toBase58()); UserService daemon = PEERGOS.main(args); poststrap(args); return daemon; } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } }, Arrays.asList( new Command.Arg("domain", "The hostname to listen on", true, "localhost"), new Command.Arg("port", "The port for the local non tls server to listen on", true, "8000"), new Command.Arg("useIPFS", "Whether to use IPFS or a local datastore", true, "false"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers (or :memory: or ram based)", true, ":memory:"), new Command.Arg("social-sql-file", "The filename for the follow requests (or :memory: or ram based)", true, ":memory:"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("ipfs-config-api-port", "ipfs api port", true, "5001"), new Command.Arg("ipfs-config-gateway-port", "ipfs gateway port", true, "8080"), new Command.Arg("pki.secret.key.path", "The path to the pki secret key file", true, "test.pki.secret.key"), new Command.Arg("pki.public.key.path", "The path to the pki public key file", true, "test.pki.public.key"), // Secret parameters new Command.Arg("peergos.password", "The password for the 'peergos' user", true), new Command.Arg("pki.keygen.password", "The password to generate the pki key from", true), new Command.Arg("pki.keyfile.password", "The password protecting the pki keyfile", true) ) ); public static final Command<UserService> PKI = new Command<>("pki", "Start the Peergos PKI Server that has already been bootstrapped", args -> { try { int peergosPort = args.getInt("port"); int ipfsApiPort = args.getInt("ipfs-config-api-port"); args.setIfAbsent("proxy-target", getLocalMultiAddress(peergosPort).toString()); IpfsWrapper ipfs = null; boolean useIPFS = args.getBoolean("useIPFS"); if (useIPFS) { ENSURE_IPFS_INSTALLED.main(args); ipfs = startIpfs(args); } args.setArg("ipfs-api-address", getLocalMultiAddress(ipfsApiPort).toString()); Multihash 
pkiIpfsNodeId = useIPFS ? new IpfsDHT(getLocalMultiAddress(ipfsApiPort)).id().get() : new FileContentAddressedStorage(blockstorePath(args)).id().get(); if (ipfs != null) ipfs.stop(); args.setIfAbsent("pki-node-id", pkiIpfsNodeId.toBase58()); return PEERGOS.main(args); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } }, Arrays.asList( new Command.Arg("peergos.identity.hash", "The hostname to listen on", true), new Command.Arg("domain", "The hostname to listen on", true, "localhost"), new Command.Arg("port", "The port for the local non tls server to listen on", true, "8000"), new Command.Arg("useIPFS", "Whether to use IPFS or a local datastore", true, "false"), new Command.Arg("mutable-pointers-file", "The filename for the mutable pointers (or :memory: or ram based)", true, ":memory:"), new Command.Arg("social-sql-file", "The filename for the follow requests (or :memory: or ram based)", true, ":memory:"), new Command.Arg("space-requests-sql-file", "The filename for the space requests datastore", true, "space-requests.sql"), new Command.Arg("ipfs-config-api-port", "ipfs api port", true, "5001"), new Command.Arg("ipfs-config-gateway-port", "ipfs gateway port", true, "8080"), new Command.Arg("pki.secret.key.path", "The path to the pki secret key file", true, "test.pki.secret.key"), new Command.Arg("pki.public.key.path", "The path to the pki public key file", true, "test.pki.public.key"), // Secret parameters new Command.Arg("pki.keyfile.password", "The password protecting the pki keyfile", true) ) ); public static final Command<FuseProcess> FUSE = new Command<>("fuse", "Mount a Peergos user's filesystem natively", Main::startFuse, Stream.of( new Command.Arg("username", "Peergos username", true), new Command.Arg("password", "Peergos password", true), new Command.Arg("webport", "Peergos service address port", false, "8000"), new Command.Arg("mountPoint", "The directory to mount the Peergos filesystem in", true, "peergos") ).collect(Collectors.toList()) ); public static final Command<Boolean> SHELL = new Command<>("shell", "An interactive command-line-interface to a Peergos server.", Main::startShell, Collections.emptyList() ); public static UserService startPeergos(Args a) { try { Crypto crypto = Crypto.initJava(); int webPort = a.getInt("port"); MultiAddress localPeergosApi = getLocalMultiAddress(webPort); a.setIfAbsent("proxy-target", localPeergosApi.toString()); boolean useIPFS = a.getBoolean("useIPFS"); IpfsWrapper ipfsWrapper = null; if (useIPFS) { ENSURE_IPFS_INSTALLED.main(a); ipfsWrapper = IPFS.main(a); } boolean doExportAggregatedMetrics = a.getBoolean("metrics.do_export"); if (doExportAggregatedMetrics) { int exporterPort = a.getInt("metrics.exporter_port"); AggregatedMetrics.startExporter(exporterPort); } Multihash pkiServerNodeId = Cid.decode(a.getArg("pki-node-id")); URL ipfsApiAddress = AddressUtil.getLocalAddress(a.getInt("ipfs-config-api-port")); URL ipfsGatewayAddress = AddressUtil.getLocalAddress(a.getInt("ipfs-config-gateway-port")); String domain = a.getArg("domain"); InetSocketAddress userAPIAddress = new InetSocketAddress(domain, webPort); int dhtCacheEntries = 1000; int maxValueSizeToCache = 50 * 1024; JavaPoster ipfsApi = new JavaPoster(ipfsApiAddress); JavaPoster ipfsGateway = new JavaPoster(ipfsGatewayAddress); ContentAddressedStorage localDht; if (useIPFS) { boolean enableGC = a.getBoolean("enable-gc", true); ContentAddressedStorage.HTTP ipfs = new ContentAddressedStorage.HTTP(ipfsApi, false); if (enableGC) { GarbageCollector gced = new 
GarbageCollector(ipfs, a.getInt("gc.period.millis", 60 * 60 * 1000)); gced.start(); localDht = new CachingStorage(gced, dhtCacheEntries, maxValueSizeToCache); } else localDht = new CachingStorage(ipfs, dhtCacheEntries, maxValueSizeToCache); } else localDht = new FileContentAddressedStorage(blockstorePath(a)); String hostname = a.getArg("domain"); Multihash nodeId = localDht.id().get(); boolean usePostgres = a.getBoolean("use-postgres", false); JdbcIpnsAndSocial.SqlSupplier sqlCommands = usePostgres ? new JdbcIpnsAndSocial.PostgresCommands() : new JdbcIpnsAndSocial.SqliteCommands(); Connection database; if (usePostgres) { String postgresHost = a.getArg("postgres.host"); int postgresPort = a.getInt("postgres.port", 5432); String databaseName = a.getArg("postgres.database", "peergos"); String postgresUsername = a.getArg("postgres.username"); String postgresPassword = a.getArg("postgres.password"); database = Postgres.build(postgresHost, postgresPort, databaseName, postgresUsername, postgresPassword); } else { database = Sqlite.build(Sqlite.getDbPath(a, "mutable-pointers-file")); } JdbcIpnsAndSocial rawPointers = new JdbcIpnsAndSocial(database, sqlCommands); MutablePointers localPointers = UserRepository.build(localDht, rawPointers); MutablePointersProxy proxingMutable = new HttpMutablePointers(ipfsGateway, pkiServerNodeId); PublicKeyHash peergosId = PublicKeyHash.fromString(a.getArg("peergos.identity.hash")); // build a mirroring proxying corenode, unless we are the pki node boolean isPkiNode = nodeId.equals(pkiServerNodeId); CoreNode core = isPkiNode ? buildPkiCorenode(new PinningMutablePointers(localPointers, localDht), localDht, a) : new MirrorCoreNode(new HTTPCoreNode(ipfsGateway, pkiServerNodeId), proxingMutable, localDht, peergosId, a.fromPeergosDir("pki-mirror-state-path","pki-state.cbor")); long defaultQuota = a.getLong("default-quota"); long maxUsers = a.getLong("max-users"); Logging.LOG().info("Using default user space quota of " + defaultQuota); Path quotaFilePath = a.fromPeergosDir("quotas_file","quotas.txt"); Path statePath = a.fromPeergosDir("state_path","usage-state.cbor"); Connection spaceDb = usePostgres ? 
database : Sqlite.build(Sqlite.getDbPath(a, "space-requests-sql-file")); JdbcSpaceRequests spaceRequests = JdbcSpaceRequests.build(spaceDb, sqlCommands); UserQuotas userQuotas = new UserQuotas(quotaFilePath, defaultQuota, maxUsers); CoreNode signupFilter = new SignUpFilter(core, userQuotas, nodeId); SpaceCheckingKeyFilter spaceChecker = new SpaceCheckingKeyFilter(core, localPointers, localDht, userQuotas, spaceRequests, statePath); CorenodeEventPropagator corePropagator = new CorenodeEventPropagator(signupFilter); corePropagator.addListener(spaceChecker::accept); MutableEventPropagator localMutable = new MutableEventPropagator(localPointers); localMutable.addListener(spaceChecker::accept); ContentAddressedStorage filteringDht = new WriteFilter(localDht, spaceChecker::allowWrite); ContentAddressedStorageProxy proxingDht = new ContentAddressedStorageProxy.HTTP(ipfsGateway); ContentAddressedStorage p2pDht = new ContentAddressedStorage.Proxying(filteringDht, proxingDht, nodeId, core); Path blacklistPath = a.fromPeergosDir("blacklist_file", "blacklist.txt"); PublicKeyBlackList blacklist = new UserBasedBlacklist(blacklistPath, core, localMutable, p2pDht); MutablePointers blockingMutablePointers = new BlockingMutablePointers(new PinningMutablePointers(localMutable, p2pDht), blacklist); MutablePointers p2mMutable = new ProxyingMutablePointers(nodeId, core, blockingMutablePointers, proxingMutable); SocialNetworkProxy httpSocial = new HttpSocialNetwork(ipfsGateway, ipfsGateway); Connection socialDatabase = usePostgres ? database : Sqlite.build(Sqlite.getDbPath(a, "social-sql-file")); JdbcIpnsAndSocial rawSocial = new JdbcIpnsAndSocial(socialDatabase, sqlCommands); SocialNetwork local = UserRepository.build(p2pDht, rawSocial); SocialNetwork p2pSocial = new ProxyingSocialNetwork(nodeId, core, local, httpSocial); Path userPath = a.fromPeergosDir("whitelist_file", "user_whitelist.txt"); int delayMs = a.getInt("whitelist_sleep_period", 1000 * 60 * 10); new UserFilePinner(userPath, core, p2mMutable, p2pDht, delayMs).start(); Set<String> adminUsernames = Arrays.asList(a.getArg("admin-usernames").split(",")) .stream() .collect(Collectors.toSet()); Admin storageAdmin = new Admin(adminUsernames, spaceRequests, userQuotas, core, localDht); HttpSpaceUsage httpSpaceUsage = new HttpSpaceUsage(ipfsGateway, ipfsGateway); ProxyingSpaceUsage p2pSpaceUsage = new ProxyingSpaceUsage(nodeId, corePropagator, spaceChecker, httpSpaceUsage); UserService peergos = new UserService(p2pDht, crypto, corePropagator, p2pSocial, p2mMutable, storageAdmin, p2pSpaceUsage); InetSocketAddress localAddress = new InetSocketAddress("localhost", userAPIAddress.getPort()); Optional<Path> webroot = a.hasArg("webroot") ? Optional.of(Paths.get(a.getArg("webroot"))) : Optional.empty(); boolean useWebAssetCache = a.getBoolean("webcache", true); Optional<String> tlsHostname = hostname.equals("localhost") ? Optional.empty() : Optional.of(hostname); Optional<UserService.TlsProperties> tlsProps = tlsHostname.map(host -> new UserService.TlsProperties(host, a.getArg("tls.keyfile.password"))); peergos.initAndStart(localAddress, tlsProps, webroot, useWebAssetCache); if (! 
isPkiNode) { int pkiNodeSwarmPort = a.getInt("pki.node.swarm.port"); InetAddress pkiNodeIpAddress = InetAddress.getByName(a.getArg("pki.node.ipaddress")); ipfsWrapper.connectToNode(new InetSocketAddress(pkiNodeIpAddress, pkiNodeSwarmPort), pkiServerNodeId); ((MirrorCoreNode) core).start(); } spaceChecker.calculateUsage(); if (a.hasArg("mirror.node.id")) { Multihash nodeToMirrorId = Cid.decode(a.getArg("mirror.node.id")); NetworkAccess localApi = NetworkAccess.buildJava(webPort).join(); new Thread(() -> { while (true) { try { Mirror.mirrorNode(nodeToMirrorId, localApi, rawPointers, localDht); try { Thread.sleep(60_000); } catch (InterruptedException f) {} } catch (Exception e) { e.printStackTrace(); try { Thread.sleep(5_000); } catch (InterruptedException f) {} } } }).start(); } if (a.hasArg("mirror.username")) { NetworkAccess localApi = NetworkAccess.buildJava(webPort).join(); new Thread(() -> { while (true) { try { Mirror.mirrorUser(a.getArg("mirror.username"), localApi, rawPointers, localDht); try { Thread.sleep(60_000); } catch (InterruptedException f) {} } catch (Exception e) { e.printStackTrace(); try { Thread.sleep(5_000); } catch (InterruptedException f) {} } } }).start(); } return peergos; } catch (Exception e) { throw new RuntimeException(e); } } public static FuseProcess startFuse(Args a) { String username = a.getArg("username"); String password = a.getArg("password"); int webPort = a.getInt("webport"); try { Files.createTempDirectory("peergos").toString(); } catch (IOException ioe) { throw new IllegalStateException(ioe); } String mountPath = a.getArg("mountPoint"); Path path = Paths.get(mountPath); path.toFile().mkdirs(); System.out.println("\n\nPeergos mounted at " + path + "\n\n"); try { NetworkAccess network = NetworkAccess.buildJava(webPort).get(); Crypto crypto = Crypto.initJava(); UserContext userContext = PeergosNetworkUtils.ensureSignedUp(username, password, network, crypto); PeergosFS peergosFS = new PeergosFS(userContext); FuseProcess fuseProcess = new FuseProcess(peergosFS, path); Runtime.getRuntime().addShutdownHook(new Thread(() -> fuseProcess.close(), "Fuse shutdown")); fuseProcess.start(); return fuseProcess; } catch (Exception ex) { throw new IllegalStateException(ex); } } public static IpfsWrapper startIpfs(Args a) { // test if ipfs is already running int ipfsApiPort = IpfsWrapper.getApiPort(a); if (IpfsWrapper.isHttpApiListening(ipfsApiPort)) { throw new IllegalStateException("IPFS is already running on api port " + ipfsApiPort); } IpfsWrapper ipfs = IpfsWrapper.build(a); if (a.getBoolean("ipfs-manage-runtime", true)) IpfsWrapper.launchAndManage(ipfs); else { IpfsWrapper.launchOnce(ipfs); } // wait for daemon to finish starting ipfs.waitForDaemon(10); return ipfs; } public static Boolean startShell(Args args) { CLI.main(new String[]{}); return true; } private static CoreNode buildPkiCorenode(MutablePointers mutable, ContentAddressedStorage dht, Args a) { try { Crypto crypto = Crypto.initJava(); PublicKeyHash peergosIdentity = PublicKeyHash.fromString(a.getArg("peergos.identity.hash")); String pkiSecretKeyfilePassword = a.getArg("pki.keyfile.password"); PublicSigningKey pkiPublic = PublicSigningKey.fromByteArray( Files.readAllBytes(a.fromPeergosDir("pki.public.key.path"))); SecretSigningKey pkiSecretKey = SecretSigningKey.fromCbor(CborObject.fromByteArray( PasswordProtected.decryptWithPassword( CborObject.fromByteArray(Files.readAllBytes(a.fromPeergosDir("pki.secret.key.path"))), pkiSecretKeyfilePassword, crypto.hasher, crypto.symmetricProvider, crypto.random 
))); SigningKeyPair pkiKeys = new SigningKeyPair(pkiPublic, pkiSecretKey); PublicKeyHash pkiPublicHash = ContentAddressedStorage.hashKey(pkiKeys.publicSigningKey); MaybeMultihash currentPkiRoot = mutable.getPointerTarget(peergosIdentity, pkiPublicHash, dht).get(); SigningPrivateKeyAndPublicHash pkiSigner = new SigningPrivateKeyAndPublicHash(pkiPublicHash, pkiSecretKey); if (! currentPkiRoot.isPresent()) currentPkiRoot = IpfsTransaction.call(peergosIdentity, tid -> WriterData.createEmpty(peergosIdentity, pkiSigner, dht, tid).join() .commit(peergosIdentity, pkiSigner, MaybeMultihash.empty(), mutable, dht, tid) .thenApply(version -> version.get(pkiSigner).hash), dht).join(); return new IpfsCoreNode(pkiSigner, currentPkiRoot, dht, mutable, peergosIdentity); } catch (Exception e) { throw new RuntimeException(e); } } public static final Command<Void> MAIN = new Command<>("Main", "Run a Peergos command", args -> { System.out.println("Run with -help to show options"); return null; }, Collections.emptyList(), Arrays.asList( PKI_INIT, PKI, PEERGOS, FUSE, SHELL ) ); /** * Create path to local blockstore directory from Args. * * @param args * @return */ private static Path blockstorePath(Args args) { return args.fromPeergosDir("blockstore_dir", "blockstore"); } public static MultiAddress getLocalMultiAddress(int port) { return new MultiAddress("/ip4/127.0.0.1/tcp/" + port); } public static MultiAddress getLocalBootstrapAddress(int port, Multihash nodeId) { return new MultiAddress("/ip4/127.0.0.1/tcp/" + port + "/ipfs/"+ nodeId); } public static void main(String[] args) { MAIN.main(Args.parse(args)); } }
rename metrics args
src/peergos/server/Main.java
rename metrics args
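The startPeergos code in the record above runs its node- and user-mirroring jobs in endless retry loops (mirror, sleep 60s, and on failure log and sleep 5s before trying again). Below is a minimal, dependency-free sketch of that loop shape only; the Peergos-specific calls (Mirror.mirrorNode / Mirror.mirrorUser, NetworkAccess, JdbcIpnsAndSocial) are replaced by a placeholder Runnable, so this is an illustration of the pattern, not the project's API.

// Sketch: run a task forever, sleeping 60s between runs and 5s after a failure,
// mirroring the thread structure used for "mirror.node.id" / "mirror.username".
public class PeriodicRetryLoopSketch {

    public static Thread startLoop(Runnable task) {
        Thread t = new Thread(() -> {
            while (true) {
                try {
                    task.run();               // placeholder for the mirroring call
                    sleepQuietly(60_000);     // normal polling interval
                } catch (Exception e) {
                    e.printStackTrace();
                    sleepQuietly(5_000);      // short back-off after an error
                }
            }
        });
        t.start();
        return t;
    }

    private static void sleepQuietly(long millis) {
        try { Thread.sleep(millis); } catch (InterruptedException ignored) {}
    }

    public static void main(String[] args) {
        startLoop(() -> System.out.println("mirroring..."));
    }
}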
Java
agpl-3.0
180607ed0789f8ac0316942aacd2c6124f5c6996
0
MarkehMe/FactionsPlus
package markehme.factionsplus; import java.io.*; import java.lang.reflect.*; import markehme.factionsplus.extras.*; import org.bukkit.configuration.file.*; import org.bukkit.plugin.*; public abstract class Config {//not named Conf so to avoid conflicts with com.massivecraft.factions.Conf //could use Plugin.getDataFolder() (tho no need) and move these to onEnable() or onLoad() else will likely NPE if using getDataFolder() public static final File folderBase= new File( "plugins" + File.separator + "FactionsPlus" );//just never be "" cause that means root folder public static final File folderWarps = new File( folderBase, "warps" ); public static final File folderJails = new File(folderBase, "jails" ); public static final File folderAnnouncements = new File( folderBase, "announcements" ); public static final File folderFRules = new File( folderBase, "frules" ); public static final File folderFBans = new File( folderBase, "fbans" ); public static final File fileDisableInWarzone = new File( folderBase, "disabled_in_warzone.txt"); public static File templatesFile = new File(folderBase , "templates.yml"); public static FileConfiguration templates; static final String fileConfigDefaults = "config_defaults.yml";//this file is located inside .jar in root dir //and it contains the defaults, so that they are no longer hardcoded in java code public static File fileConfig = new File(Config.folderBase , "config.yml"); public static FileConfiguration config; public static final String prefJails="jails"+Config.delim; public static final String confStr_enableJails = prefJails+"enableJails"; public static final String confStr_leadersCanSetJails = prefJails+"leadersCanSetJails"; public static final String confStr_officersCanSetJails = prefJails+"officersCanSetJails"; public static final String confStr_membersCanSetJails = prefJails+"membersCanSetJails"; public static final String confStr_leadersCanJail = prefJails+"leadersCanJail"; public static final String confStr_officersCanJail = prefJails+"officersCanJail"; //Begin Config String Pointers public static final String delim="."; public static final String prefWarps="warps"+delim; public static final String confStr_enableWarps = prefWarps+"enableWarps"; public static final String confStr_leadersCanSetWarps = prefWarps+"leadersCanSetWarps"; public static final String confStr_officersCanSetWarps = prefWarps+"officersCanSetWarps"; public static final String confStr_membersCanSetWarps = prefWarps+"membersCanSetWarps"; public static final String confStr_mustBeInOwnTerritoryToCreate = prefWarps+"mustBeInOwnTerritoryToCreate"; public static final String confStr_maxWarps = prefWarps+"maxWarps"; public static final String confStr_warpTeleportAllowedFromEnemyTerritory = prefWarps+"warpTeleportAllowedFromEnemyTerritory"; public static final String confStr_warpTeleportAllowedFromDifferentWorld = prefWarps+"warpTeleportAllowedFromDifferentWorld"; public static final String confStr_warpTeleportAllowedEnemyDistance = prefWarps+"warpTeleportAllowedEnemyDistance"; public static final String confStr_warpTeleportIgnoreEnemiesIfInOwnTerritory = prefWarps+"warpTeleportIgnoreEnemiesIfInOwnTerritory"; public static final String confStr_smokeEffectOnWarp = prefWarps+"smokeEffectOnWarp"; public static final String prefBanning="banning"+delim; public static final String confStr_enableBans=prefBanning+"enableBans"; public static final String confStr_leadersCanFactionBan=prefBanning+"leadersCanFactionBan"; public static final String 
confStr_officersCanFactionBan=prefBanning+"officersCanFactionBan"; public static final String confStr_leadersCanFactionUnban=prefBanning+"leadersCanFactionUnban"; public static final String confStr_officersCanFactionUnban=prefBanning+"officersCanFactionUnban"; public static final String confStr_leaderCanNotBeBanned=prefBanning+"leaderCanNotBeBanned"; public static final String prefRules="rules"+delim; public static final String confStr_enableRules=prefRules+"enableRules"; public static final String confStr_leadersCanSetRules=prefRules+"leadersCanSetRules"; public static final String confStr_officersCanSetRules=prefRules+"officersCanSetRules"; public static final String confStr_maxRulesPerFaction=prefRules+"maxRulesPerFaction"; public static final String prefPeaceful="peaceful"+delim; public static final String confStr_leadersCanToggleState=prefPeaceful+"leadersCanToggleState"; public static final String confStr_officersCanToggleState=prefPeaceful+"officersCanToggleState"; public static final String confStr_membersCanToggleState=prefPeaceful+"membersCanToggleState"; public static final String confStr_enablePeacefulBoosts=prefPeaceful+"enablePeacefulBoosts"; public static final String confStr_powerBoostIfPeaceful=prefPeaceful+"powerBoostIfPeaceful"; public static final String prefPowerboosts="powerboosts"+delim; public static final String confStr_enablePowerBoosts=prefPowerboosts+"enablePowerBoosts"; public static final String confStr_extraPowerWhenKillPlayer=prefPowerboosts+"extraPowerWhenKillPlayer"; public static final String confStr_extraPowerLossIfDeathBySuicide=prefPowerboosts+"extraPowerLossIfDeathBySuicide"; public static final String confStr_extraPowerLossIfDeathByPVP=prefPowerboosts+"extraPowerLossIfDeathByPVP"; public static final String confStr_extraPowerLossIfDeathByMob=prefPowerboosts+"extraPowerLossIfDeathByMob"; public static final String confStr_extraPowerLossIfDeathByCactus=prefPowerboosts+"extraPowerLossIfDeathByCactus"; public static final String confStr_extraPowerLossIfDeathByTNT=prefPowerboosts+"extraPowerLossIfDeathByTNT"; public static final String confStr_extraPowerLossIfDeathByFire=prefPowerboosts+"extraPowerLossIfDeathByFire"; public static final String confStr_extraPowerLossIfDeathByPotion=prefPowerboosts+"extraPowerLossIfDeathByPotion"; public static final String confStr_extraPowerLossIfDeathByOther=prefPowerboosts+"extraPowerLossIfDeathByOther"; public static final String prefAnnounce="announce"+delim; public static final String confStr_enableAnnounce=prefAnnounce+"enableAnnounce"; public static final String confStr_leadersCanAnnounce=prefAnnounce+"leadersCanAnnounce"; public static final String confStr_officersCanAnnounce=prefAnnounce+"officersCanAnnounce"; public static final String confStr_showLastAnnounceOnLogin=prefAnnounce+"showLastAnnounceOnLogin"; public static final String confStr_showLastAnnounceOnLandEnter=prefAnnounce+"showLastAnnounceOnLandEnter"; public static final String prefEconomy="economy"+delim; public static final String confStr_enableEconomy=prefEconomy+"enableEconomy"; public static final String confStr_economyCostToWarp=prefEconomy+"economyCostToWarp"; public static final String confStr_economyCostToCreateWarp=prefEconomy+"economyCostToCreateWarp"; public static final String confStr_economyCostToDeleteWarp=prefEconomy+"economyCostToDeleteWarp"; public static final String confStr_economyCostToAnnounce=prefEconomy+"economyCostToAnnounce"; public static final String confStr_economyCostToJail=prefEconomy+"economyCostToJail"; public static 
final String confStr_economyCostToSetJail=prefEconomy+"economyCostToSetJail"; public static final String confStr_economyCostToUnJail=prefEconomy+"economyCostToUnJail"; public static final String confStr_economyCostToToggleUpPeaceful=prefEconomy+"economyCostToToggleUpPeaceful"; public static final String confStr_economyCostToToggleDownPeaceful=prefEconomy+"economyCostToToggleDownPeaceful"; public static final String prefTeleports="Teleports"+delim; public static final String confStr_disallowTeleportingToEnemyLandViaHomeCommand= prefTeleports+"disallowTeleportingToEnemyLandViaHomeCommand"; public static final String confStr_reportSuccessfulByCommandTeleportsIntoEnemyLand=prefTeleports+"reportSuccessfulByCommandTeleportsIntoEnemyLand"; public static final String confStr_disallowTeleportingToEnemyLandViaEnderPeals=prefTeleports+"disallowTeleportingToEnemyLandViaEnderPeals"; public static final String prefExtras="extras"+delim; public static final String confStr_disableUpdateCheck=prefExtras+"disableUpdateCheck"; public static final String prefExtrasLWC=prefExtras+"LWC"+delim; public static final String confStr_removeLWCLocksOnClaim=prefExtrasLWC+"removeLWCLocksOnClaim"; public static final String confStr_blockCPublicAccessOnNonOwnFactionTerritory=prefExtrasLWC+"blockCPublicAccessOnNonOwnFactionTerritory"; public static final String prefExtrasMD=prefExtras+"disguise"+delim; public static final String confStr_enableDisguiseIntegration=prefExtrasMD+"enableDisguiseIntegration"; public static final String confStr_unDisguiseIfInOwnTerritory=prefExtrasMD+"unDisguiseIfInOwnTerritory"; public static final String confStr_unDisguiseIfInEnemyTerritory=prefExtrasMD+"unDisguiseIfInEnemyTerritory"; public static final String confStr_DoNotChangeMe="DoNotChangeMe"; //End Config String Pointer private static File currentFolder_OnPluginClassInit; private static File currentFolder_OnEnable=null; /** * call this in plugin.onLoad (the thing that happens before onEnable() ) * @param plugin */ protected static void onLoad() { boolean failed = false; try { if ( Q.isInconsistencyFileBug() ) { throw FactionsPlusPlugin .bailOut( "Please do not have `user.dir` property set, it will mess up so many things" + "(or did you use native functions to change current folder from the one that was on jvm startup?!)" ); } if ( hasFileFieldsTrap() ) { throw FactionsPlusPlugin.bailOut( "there is a coding trap which will likely cause unexpected behaviour " + "in places that use files, tell plugin author to fix" ); } } catch ( Throwable t ) { failed = true; Q.rethrow( t ); } finally { if ( failed ) { FactionsPlus.instance.setDisAllowPluginToEnable(); } } } /** * make sure all the File fields in this class that are likely used somewhere else in constructors like new File(field, myfile); * are non-empty to avoid 'myfile' being in root of drive instead of just current folder as expected<br> * this would cause some evil inconsistencies if any of those fields would resolve to empty paths<br> */ private static boolean hasFileFieldsTrap() { Class classToCheckFor_FileFields = Config.class; Field[] allFields = classToCheckFor_FileFields.getFields(); for ( Field field : allFields ) { if (File.class.equals( field.getType())) { //got one File field to check try { File instance = (File)field.get( classToCheckFor_FileFields ); if (instance.getPath().isEmpty()) { //oops, found one, to avoid traps where you expect new File( instance, yourfile); // to have 'yourfile' in root folder of that drive ie. 
'\yourfile' instead of what you might //expect "yourfile" to be just in current folder just like a new File(yourfile) would do return true; } } catch ( IllegalArgumentException e ) { Q.rethrow(e); } catch ( IllegalAccessException e ) { Q.rethrow(e); } } } return false; } /** * called on plugin.onEnable() and every time you want the config to reload */ protected static void reload() { Config.config=null;//must be here to cause config to reload on every plugin(s) reload from console } }
src/markehme/factionsplus/Config.java
package markehme.factionsplus; import java.io.*; import java.lang.reflect.*; import markehme.factionsplus.extras.*; import org.bukkit.configuration.file.*; import org.bukkit.plugin.*; public abstract class Config { //could use Plugin.getDataFolder() (tho no need) and move these to onEnable() or onLoad() else will likely NPE if using getDataFolder() public static final File folderBase= new File( "plugins" + File.separator + "FactionsPlus" );//just never be "" cause that means root folder public static final File folderWarps = new File( folderBase, "warps" ); public static final File folderJails = new File("" );//folderBase, "jails" ); public static final File folderAnnouncements = new File( folderBase, "announcements" ); public static final File folderFRules = new File( folderBase, "frules" ); public static final File folderFBans = new File( folderBase, "fbans" ); public static final File fileDisableInWarzone = new File( folderBase, "disabled_in_warzone.txt"); public static File templatesFile = new File(folderBase , "templates.yml"); public static FileConfiguration templates; static final String fileConfigDefaults = "config_defaults.yml";//this file is located inside .jar in root dir //and it contains the defaults, so that they are no longer hardcoded in java code public static File fileConfig = new File(Config.folderBase , "config.yml"); public static FileConfiguration config;//not named Conf so to avoid conflicts with com.massivecraft.factions.Conf public static final String prefJails="jails"+Config.delim; public static final String confStr_enableJails = prefJails+"enableJails"; public static final String confStr_leadersCanSetJails = prefJails+"leadersCanSetJails"; public static final String confStr_officersCanSetJails = prefJails+"officersCanSetJails"; public static final String confStr_membersCanSetJails = prefJails+"membersCanSetJails"; public static final String confStr_leadersCanJail = prefJails+"leadersCanJail"; public static final String confStr_officersCanJail = prefJails+"officersCanJail"; //Begin Config String Pointers public static final String delim="."; public static final String prefWarps="warps"+delim; public static final String confStr_enableWarps = prefWarps+"enableWarps"; public static final String confStr_leadersCanSetWarps = prefWarps+"leadersCanSetWarps"; public static final String confStr_officersCanSetWarps = prefWarps+"officersCanSetWarps"; public static final String confStr_membersCanSetWarps = prefWarps+"membersCanSetWarps"; public static final String confStr_mustBeInOwnTerritoryToCreate = prefWarps+"mustBeInOwnTerritoryToCreate"; public static final String confStr_maxWarps = prefWarps+"maxWarps"; public static final String confStr_warpTeleportAllowedFromEnemyTerritory = prefWarps+"warpTeleportAllowedFromEnemyTerritory"; public static final String confStr_warpTeleportAllowedFromDifferentWorld = prefWarps+"warpTeleportAllowedFromDifferentWorld"; public static final String confStr_warpTeleportAllowedEnemyDistance = prefWarps+"warpTeleportAllowedEnemyDistance"; public static final String confStr_warpTeleportIgnoreEnemiesIfInOwnTerritory = prefWarps+"warpTeleportIgnoreEnemiesIfInOwnTerritory"; public static final String confStr_smokeEffectOnWarp = prefWarps+"smokeEffectOnWarp"; public static final String prefBanning="banning"+delim; public static final String confStr_enableBans=prefBanning+"enableBans"; public static final String confStr_leadersCanFactionBan=prefBanning+"leadersCanFactionBan"; public static final String 
confStr_officersCanFactionBan=prefBanning+"officersCanFactionBan"; public static final String confStr_leadersCanFactionUnban=prefBanning+"leadersCanFactionUnban"; public static final String confStr_officersCanFactionUnban=prefBanning+"officersCanFactionUnban"; public static final String confStr_leaderCanNotBeBanned=prefBanning+"leaderCanNotBeBanned"; public static final String prefRules="rules"+delim; public static final String confStr_enableRules=prefRules+"enableRules"; public static final String confStr_leadersCanSetRules=prefRules+"leadersCanSetRules"; public static final String confStr_officersCanSetRules=prefRules+"officersCanSetRules"; public static final String confStr_maxRulesPerFaction=prefRules+"maxRulesPerFaction"; public static final String prefPeaceful="peaceful"+delim; public static final String confStr_leadersCanToggleState=prefPeaceful+"leadersCanToggleState"; public static final String confStr_officersCanToggleState=prefPeaceful+"officersCanToggleState"; public static final String confStr_membersCanToggleState=prefPeaceful+"membersCanToggleState"; public static final String confStr_enablePeacefulBoosts=prefPeaceful+"enablePeacefulBoosts"; public static final String confStr_powerBoostIfPeaceful=prefPeaceful+"powerBoostIfPeaceful"; public static final String prefPowerboosts="powerboosts"+delim; public static final String confStr_enablePowerBoosts=prefPowerboosts+"enablePowerBoosts"; public static final String confStr_extraPowerWhenKillPlayer=prefPowerboosts+"extraPowerWhenKillPlayer"; public static final String confStr_extraPowerLossIfDeathBySuicide=prefPowerboosts+"extraPowerLossIfDeathBySuicide"; public static final String confStr_extraPowerLossIfDeathByPVP=prefPowerboosts+"extraPowerLossIfDeathByPVP"; public static final String confStr_extraPowerLossIfDeathByMob=prefPowerboosts+"extraPowerLossIfDeathByMob"; public static final String confStr_extraPowerLossIfDeathByCactus=prefPowerboosts+"extraPowerLossIfDeathByCactus"; public static final String confStr_extraPowerLossIfDeathByTNT=prefPowerboosts+"extraPowerLossIfDeathByTNT"; public static final String confStr_extraPowerLossIfDeathByFire=prefPowerboosts+"extraPowerLossIfDeathByFire"; public static final String confStr_extraPowerLossIfDeathByPotion=prefPowerboosts+"extraPowerLossIfDeathByPotion"; public static final String confStr_extraPowerLossIfDeathByOther=prefPowerboosts+"extraPowerLossIfDeathByOther"; public static final String prefAnnounce="announce"+delim; public static final String confStr_enableAnnounce=prefAnnounce+"enableAnnounce"; public static final String confStr_leadersCanAnnounce=prefAnnounce+"leadersCanAnnounce"; public static final String confStr_officersCanAnnounce=prefAnnounce+"officersCanAnnounce"; public static final String confStr_showLastAnnounceOnLogin=prefAnnounce+"showLastAnnounceOnLogin"; public static final String confStr_showLastAnnounceOnLandEnter=prefAnnounce+"showLastAnnounceOnLandEnter"; public static final String prefEconomy="economy"+delim; public static final String confStr_enableEconomy=prefEconomy+"enableEconomy"; public static final String confStr_economyCostToWarp=prefEconomy+"economyCostToWarp"; public static final String confStr_economyCostToCreateWarp=prefEconomy+"economyCostToCreateWarp"; public static final String confStr_economyCostToDeleteWarp=prefEconomy+"economyCostToDeleteWarp"; public static final String confStr_economyCostToAnnounce=prefEconomy+"economyCostToAnnounce"; public static final String confStr_economyCostToJail=prefEconomy+"economyCostToJail"; public static 
final String confStr_economyCostToSetJail=prefEconomy+"economyCostToSetJail"; public static final String confStr_economyCostToUnJail=prefEconomy+"economyCostToUnJail"; public static final String confStr_economyCostToToggleUpPeaceful=prefEconomy+"economyCostToToggleUpPeaceful"; public static final String confStr_economyCostToToggleDownPeaceful=prefEconomy+"economyCostToToggleDownPeaceful"; public static final String prefTeleports="Teleports"+delim; public static final String confStr_disallowTeleportingToEnemyLandViaHomeCommand= prefTeleports+"disallowTeleportingToEnemyLandViaHomeCommand"; public static final String confStr_reportSuccessfulByCommandTeleportsIntoEnemyLand=prefTeleports+"reportSuccessfulByCommandTeleportsIntoEnemyLand"; public static final String confStr_disallowTeleportingToEnemyLandViaEnderPeals=prefTeleports+"disallowTeleportingToEnemyLandViaEnderPeals"; public static final String prefExtras="extras"+delim; public static final String confStr_disableUpdateCheck=prefExtras+"disableUpdateCheck"; public static final String prefExtrasLWC=prefExtras+"LWC"+delim; public static final String confStr_removeLWCLocksOnClaim=prefExtrasLWC+"removeLWCLocksOnClaim"; public static final String confStr_blockCPublicAccessOnNonOwnFactionTerritory=prefExtrasLWC+"blockCPublicAccessOnNonOwnFactionTerritory"; public static final String prefExtrasMD=prefExtras+"disguise"+delim; public static final String confStr_enableDisguiseIntegration=prefExtrasMD+"enableDisguiseIntegration"; public static final String confStr_unDisguiseIfInOwnTerritory=prefExtrasMD+"unDisguiseIfInOwnTerritory"; public static final String confStr_unDisguiseIfInEnemyTerritory=prefExtrasMD+"unDisguiseIfInEnemyTerritory"; public static final String confStr_DoNotChangeMe="DoNotChangeMe"; //End Config String Pointer private static File currentFolder_OnPluginClassInit; private static File currentFolder_OnEnable=null; /** * call this in plugin.onLoad (the thing that happens before onEnable() ) * @param plugin */ protected static void onLoad() { boolean failed = false; try { if ( Q.isInconsistencyFileBug() ) { throw FactionsPlusPlugin .bailOut( "Please do not have `user.dir` property set, it will mess up so many things" + "(or did you use native functions to change current folder from the one that was on jvm startup?!)" ); } if ( hasFileFieldsTrap() ) { throw FactionsPlusPlugin.bailOut( "there is a coding trap which will likely cause unexpected behaviour " + "in places that use files, tell plugin author to fix" ); } } catch ( Throwable t ) { failed = true; Q.rethrow( t ); } finally { if ( failed ) { FactionsPlus.instance.setDisAllowPluginToEnable(); } } } /** * make sure all the File fields in this class that are likely used somewhere else in constructors like new File(field, myfile); * are non-empty to avoid 'myfile' being in root of drive instead of just current folder as expected<br> * this would cause some evil inconsistencies if any of those fields would resolve to empty paths<br> */ private static boolean hasFileFieldsTrap() { Class classToCheckFor_FileFields = Config.class; Field[] allFields = classToCheckFor_FileFields.getFields(); for ( Field field : allFields ) { if (File.class.equals( field.getType())) { //got one File field to check try { File instance = (File)field.get( classToCheckFor_FileFields ); if (instance.getPath().isEmpty()) { //oops, found one, to avoid traps where you expect new File( instance, yourfile); // to have 'yourfile' in root folder of that drive ie. 
'\yourfile' instead of what you might //expect "yourfile" to be just in current folder just like a new File(yourfile) would do return true; } } catch ( IllegalArgumentException e ) { Q.rethrow(e); } catch ( IllegalAccessException e ) { Q.rethrow(e); } } } return false; } /** * called on plugin.onEnable() and every time you want the config to reload */ protected static void reload() { Config.config=null;//must be here to cause config to reload on every plugin(s) reload from console } }
fixed to not err
src/markehme/factionsplus/Config.java
fixed to not err
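The hasFileFieldsTrap check in the Config class of the record above guards against File constants that resolve to an empty path: with an empty parent, java.io.File resolves a child against the filesystem's default parent (the drive root) rather than the current working directory, which is exactly the bug the "fixed to not err" commit removes from folderJails. A small standalone demonstration of that JDK behaviour (plain java.io only, nothing from the plugin assumed):

import java.io.File;

public class EmptyParentPathDemo {
    public static void main(String[] args) {
        // A File built from the empty string has an empty path.
        File emptyParent = new File("");
        System.out.println("parent path: '" + emptyParent.getPath() + "'");   // ''

        // Using it as a parent resolves the child against the drive root,
        // e.g. "/myfile" on Unix or "\myfile" on Windows.
        File viaEmptyParent = new File(emptyParent, "myfile");
        System.out.println(viaEmptyParent.getPath());

        // A plain relative File stays relative to the working directory.
        File plain = new File("myfile");
        System.out.println(plain.getPath());            // "myfile"
        System.out.println(plain.getAbsolutePath());    // <cwd>/myfile
    }
}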
Java
lgpl-2.1
091ed007df0abb447b9ce294861d2e01e1b259f5
0
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
package org.intermine.web; /* * Copyright (C) 2002-2007 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.intermine.metadata.Model; import org.intermine.model.InterMineObject; import org.intermine.objectstore.ObjectStoreWriter; import org.intermine.web.logic.bag.IdUpgrader; import org.intermine.web.logic.bag.InterMineBagHandler; import org.intermine.web.logic.profile.Profile; import org.intermine.web.logic.profile.ProfileManager; import org.intermine.web.logic.query.SavedQueryHandler; import org.intermine.web.logic.tagging.TagHandler; import org.intermine.web.logic.template.TemplateQueryHandler; import org.intermine.xml.full.FullHandler; import org.intermine.xml.full.FullParser; import org.intermine.xml.full.Item; import javax.servlet.ServletContext; import org.apache.log4j.Logger; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * Extension of DefaultHandler to handle parsing Profiles * * @author Kim Rutherford */ class ProfileHandler extends DefaultHandler { private static final Logger LOG = Logger.getLogger(ProfileHandler.class); private ProfileManager profileManager; private String username; private String password; private Map savedQueries, classKeys; private Map savedBags; private Map savedTemplates; private Set tags; private List<Item> items; private Map<Integer, InterMineObject> idObjectMap; private IdUpgrader idUpgrader; private ObjectStoreWriter osw; /** * The current child handler. If we have just seen a "bags" element, it will be an * InterMineBagBinding.InterMineBagHandler. If "template-queries" it will be an * TemplateQueryBinding.TemplateQueryHandler. If "queries" it will be a * PathQueryBinding.PathQueryHandler. If subHandler is not null subHandler.startElement() and * subHandler.endElement(), etc will be called from this class. */ DefaultHandler subHandler = null; private final ServletContext servletContext; private boolean abortOnError; /** * Create a new ProfileHandler * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags * @param abortOnError if true, throw an exception if there is a problem. If false, log the * problem and continue if possible (used by read-userprofile-xml). */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, ServletContext servletContext, ObjectStoreWriter osw, boolean abortOnError) { this(profileManager, idUpgrader, null, null, new HashSet(), servletContext, osw, abortOnError); } /** * Create a new ProfileHandler * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. 
* @param defaultUsername default username * @param defaultPassword default password * @param tags a set to populate with user tags * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags * @param abortOnError if true, throw an exception if there is a problem. If false, log the * problem and continue if possible (used by read-userprofile-xml). */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, String defaultUsername, String defaultPassword, Set tags, ServletContext servletContext, ObjectStoreWriter osw, boolean abortOnError) { super(); this.profileManager = profileManager; this.idUpgrader = idUpgrader; this.servletContext = servletContext; items = new ArrayList(); this.username = defaultUsername; this.password = defaultPassword; this.tags = tags; this.classKeys = classKeys; this.osw = osw; this.abortOnError = abortOnError; } /** * Create a new ProfileHandler. Throw an exception if there is a problem while reading * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. * @param defaultUsername default username * @param defaultPassword default password * @param tags a set to populate with user tags * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, String defaultUsername, String defaultPassword, Set tags, ServletContext servletContext, ObjectStoreWriter osw) { this(profileManager, idUpgrader, defaultPassword, defaultPassword, tags, servletContext, osw, true); } /** * Return the de-serialised Profile. * @return the new Profile */ public Profile getProfile() { Profile retval = new Profile(profileManager, username, null, password, savedQueries, savedBags, savedTemplates); return retval; } /** * Return a set of Tag objects to add to the Profile. 
* @return the set Tags */ public Set getTags() { return tags; } /** * {@inheritDoc} */ public void startElement(String uri, String localName, String qName, Attributes attrs) throws SAXException { if (qName.equals("userprofile")) { if (attrs.getValue("username") != null) { username = attrs.getValue("username"); } if (attrs.getValue("password") != null) { password = attrs.getValue("password"); } } if (qName.equals("items")) { subHandler = new FullHandler(); } if (qName.equals("bags")) { savedBags = new LinkedHashMap(); subHandler = new InterMineBagHandler(profileManager.getUserProfileObjectStore(), osw, savedBags, null, idObjectMap, idUpgrader); } if (qName.equals("template-queries")) { savedTemplates = new LinkedHashMap(); subHandler = new TemplateQueryHandler(savedTemplates, savedBags, servletContext); } if (qName.equals("queries")) { savedQueries = new LinkedHashMap(); subHandler = new SavedQueryHandler(savedQueries, savedBags, servletContext); } if (qName.equals("tags")) { subHandler = new TagHandler(username, tags); } if (subHandler != null) { subHandler.startElement(uri, localName, qName, attrs); } } /** * {@inheritDoc} */ public void endElement(String uri, String localName, String qName) throws SAXException { super.endElement(uri, localName, qName); if (qName.equals("items")) { items = ((FullHandler) subHandler).getItems(); idObjectMap = new HashMap(); Model model = profileManager.getObjectStore().getModel(); List<InterMineObject> objects; try { objects = FullParser.realiseObjects(items, model, true, false); } catch (ClassNotFoundException e) { throw new RuntimeException("unexpected exception", e); } for (InterMineObject object: objects) { idObjectMap.put(object.getId(), object); } } if (qName.equals("bags") || qName.equals("template-queries") || qName.equals("queries") || qName.equals("items") || qName.equals("tags")) { subHandler = null; } if (subHandler != null) { subHandler.endElement(uri, localName, qName); } } }
intermine/webtasks/main/src/org/intermine/web/ProfileHandler.java
package org.intermine.web; /* * Copyright (C) 2002-2007 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.intermine.metadata.Model; import org.intermine.model.InterMineObject; import org.intermine.objectstore.ObjectStoreWriter; import org.intermine.web.logic.bag.IdUpgrader; import org.intermine.web.logic.bag.InterMineBagHandler; import org.intermine.web.logic.profile.Profile; import org.intermine.web.logic.profile.ProfileManager; import org.intermine.web.logic.query.SavedQueryHandler; import org.intermine.web.logic.tagging.TagHandler; import org.intermine.web.logic.template.TemplateQueryHandler; import org.intermine.xml.full.FullHandler; import org.intermine.xml.full.FullParser; import org.intermine.xml.full.Item; import javax.servlet.ServletContext; import org.apache.log4j.Logger; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * Extension of DefaultHandler to handle parsing Profiles * * @author Kim Rutherford */ class ProfileHandler extends DefaultHandler { private static final Logger LOG = Logger.getLogger(ProfileHandler.class); private ProfileManager profileManager; private String username; private String password; private Map savedQueries, classKeys; private Map savedBags; private Map savedTemplates; private Set tags; private List<Item> items; private Map idObjectMap; private IdUpgrader idUpgrader; private ObjectStoreWriter osw; /** * The current child handler. If we have just seen a "bags" element, it will be an * InterMineBagBinding.InterMineBagHandler. If "template-queries" it will be an * TemplateQueryBinding.TemplateQueryHandler. If "queries" it will be a * PathQueryBinding.PathQueryHandler. If subHandler is not null subHandler.startElement() and * subHandler.endElement(), etc will be called from this class. */ DefaultHandler subHandler = null; private final ServletContext servletContext; private boolean abortOnError; /** * Create a new ProfileHandler * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags * @param abortOnError if true, throw an exception if there is a problem. If false, log the * problem and continue if possible (used by read-userprofile-xml). */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, ServletContext servletContext, ObjectStoreWriter osw, boolean abortOnError) { this(profileManager, idUpgrader, null, null, new HashSet(), servletContext, osw, abortOnError); } /** * Create a new ProfileHandler * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. 
* @param defaultUsername default username * @param defaultPassword default password * @param tags a set to populate with user tags * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags * @param abortOnError if true, throw an exception if there is a problem. If false, log the * problem and continue if possible (used by read-userprofile-xml). */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, String defaultUsername, String defaultPassword, Set tags, ServletContext servletContext, ObjectStoreWriter osw, boolean abortOnError) { super(); this.profileManager = profileManager; this.idUpgrader = idUpgrader; this.servletContext = servletContext; items = new ArrayList(); this.username = defaultUsername; this.password = defaultPassword; this.tags = tags; this.classKeys = classKeys; this.osw = osw; this.abortOnError = abortOnError; } /** * Create a new ProfileHandler. Throw an exception if there is a problem while reading * @param profileManager the ProfileManager to pass to the Profile constructor * @param idUpgrader the IdUpgrader to use to find objects in the new ObjectStore that * correspond to object in old bags. * @param defaultUsername default username * @param defaultPassword default password * @param tags a set to populate with user tags * @param servletContext global ServletContext object * @param osw an ObjectStoreWriter to the production database, to write bags */ public ProfileHandler(ProfileManager profileManager, IdUpgrader idUpgrader, String defaultUsername, String defaultPassword, Set tags, ServletContext servletContext, ObjectStoreWriter osw) { this(profileManager, idUpgrader, defaultPassword, defaultPassword, tags, servletContext, osw, true); } /** * Return the de-serialised Profile. * @return the new Profile */ public Profile getProfile() { Profile retval = new Profile(profileManager, username, null, password, savedQueries, savedBags, savedTemplates); return retval; } /** * Return a set of Tag objects to add to the Profile. 
* @return the set Tags */ public Set getTags() { return tags; } /** * {@inheritDoc} */ public void startElement(String uri, String localName, String qName, Attributes attrs) throws SAXException { if (qName.equals("userprofile")) { if (attrs.getValue("username") != null) { username = attrs.getValue("username"); } if (attrs.getValue("password") != null) { password = attrs.getValue("password"); } } if (qName.equals("items")) { subHandler = new FullHandler(); } if (qName.equals("bags")) { savedBags = new LinkedHashMap(); subHandler = new InterMineBagHandler(profileManager.getUserProfileObjectStore(), osw, savedBags, null, idObjectMap, idUpgrader); } if (qName.equals("template-queries")) { savedTemplates = new LinkedHashMap(); subHandler = new TemplateQueryHandler(savedTemplates, savedBags, servletContext); } if (qName.equals("queries")) { savedQueries = new LinkedHashMap(); subHandler = new SavedQueryHandler(savedQueries, savedBags, servletContext); } if (qName.equals("tags")) { subHandler = new TagHandler(username, tags); } if (subHandler != null) { subHandler.startElement(uri, localName, qName, attrs); } } /** * {@inheritDoc} */ public void endElement(String uri, String localName, String qName) throws SAXException { super.endElement(uri, localName, qName); if (qName.equals("items")) { items = ((FullHandler) subHandler).getItems(); idObjectMap = new HashMap(); Model model = profileManager.getObjectStore().getModel(); List<InterMineObject> objects = new ArrayList(); for (Item item: items) { try { List<Item> oneItemList = new ArrayList<Item>(); oneItemList.add(item); objects.addAll(FullParser.realiseObjects(oneItemList, model, true, false)); } catch (ClassNotFoundException e) { String message = "cannot turn item into object"; if (abortOnError) { throw new RuntimeException(message, e); } else { LOG.warn(message + ": " + item); } } } Iterator objectIter = objects.iterator(); while (objectIter.hasNext()) { InterMineObject object = (InterMineObject) objectIter.next(); idObjectMap.put(object.getId(), object); } } if (qName.equals("bags") || qName.equals("template-queries") || qName.equals("queries") || qName.equals("items") || qName.equals("tags")) { subHandler = null; } if (subHandler != null) { subHandler.endElement(uri, localName, qName); } } }
Reverted part of [11949] to allow read-userprofile-xml to cope (again) with objects with multipart keys that reference another object. Former-commit-id: 32569ae989b603a9aa931099b88bbb2797398571
intermine/webtasks/main/src/org/intermine/web/ProfileHandler.java
Reverted part of [11949] to allow read-userprofile-xml to cope (again) with objects with multipart keys that reference another object.
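The revert recorded above passes the complete item list to FullParser.realiseObjects in a single call, so references between items (the "multipart keys that reference another object" from the message) can be resolved while the objects are built; realising each item in isolation, as the old handler did, loses those cross-item links. The following is a minimal, self-contained sketch of that batch-versus-per-item difference; the names (ToyItem, realiseAll) are invented for illustration and are not the InterMine API.

import java.util.*;

// Illustrative only: a toy "realise" step showing why items that reference each
// other need to be converted in one batch rather than one at a time.
public class BatchRealiseSketch {
    static final class ToyItem {
        final String id;
        final String refId; // id of another item this one points at, or null
        ToyItem(String id, String refId) { this.id = id; this.refId = refId; }
    }

    // Batch conversion: every referenced id can be looked up in the same map,
    // so cross-item references resolve.
    static Map<String, ToyItem> realiseAll(List<ToyItem> items) {
        Map<String, ToyItem> byId = new HashMap<>();
        for (ToyItem item : items) {
            byId.put(item.id, item);
        }
        for (ToyItem item : items) {
            if (item.refId != null && !byId.containsKey(item.refId)) {
                throw new IllegalStateException("dangling reference: " + item.refId);
            }
        }
        return byId;
    }

    public static void main(String[] args) {
        List<ToyItem> items = Arrays.asList(new ToyItem("1", "2"), new ToyItem("2", null));
        // Realising the whole list succeeds; realising item "1" on its own would
        // fail because "2" is not visible, which mirrors the per-item loop the
        // commit above moves away from.
        System.out.println(realiseAll(items).keySet());
    }
}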
Java
lgpl-2.1
c33bb772b28768892a28ece80d649e879020d5a3
0
neoedmund/jediterm,neoedmund/jediterm
package com.jediterm.terminal.emulator; import com.jediterm.terminal.TerminalColor; import java.awt.*; /** * @author traff */ @SuppressWarnings("UseJBColor") public abstract class ColorPalette { public static final ColorPalette XTERM_PALETTE = new ColorPalette() { @Override public Color[] getIndexColors() { return new Color[]{ new Color(0x000000), //Black new Color(0xcd0000), //Red new Color(0x00cd00), //Green new Color(0xcdcd00), //Yellow new Color(0x1e90ff), //Blue new Color(0xcd00cd), //Magenta new Color(0x00cdcd), //Cyan new Color(0xe5e5e5), //White //Bright versions of the ISO colors new Color(0x4c4c4c), //Black new Color(0xff0000), //Red new Color(0x00ff00), //Green new Color(0xffff00), //Yellow new Color(0x4682b4), //Blue new Color(0xff00ff), //Magenta new Color(0x00ffff), //Cyan new Color(0xffffff), //White }; } }; public static final ColorPalette WINDOWS_PALETTE = new ColorPalette() { @Override public Color[] getIndexColors() { return new Color[]{ new Color(0x000000), //Black new Color(0x800000), //Red new Color(0x008000), //Green new Color(0x808000), //Yellow new Color(0x000080), //Blue new Color(0x800080), //Magenta new Color(0x008080), //Cyan new Color(0xc0c0c0), //White //Bright versions of the ISO colors new Color(0x808080), //Black new Color(0xff0000), //Red new Color(0x00ff00), //Green new Color(0xffff00), //Yellow new Color(0x4682b4), //Blue new Color(0xff00ff), //Magenta new Color(0x00ffff), //Cyan new Color(0xffffff), //White }; } }; public abstract Color[] getIndexColors(); public Color getColor(TerminalColor color) { if (color.isIndexed()) { return getIndexColors()[color.getIndex()]; } else { return color.toAwtColor(); } } public static TerminalColor getIndexedColor(int index) { return (index < 16) ? TerminalColor.index(index) : getXTerm256(index); } private static TerminalColor getXTerm256(int index) { return index < 256 ? COL_RES_256[index - 16] : null; } //The code below is translation of xterm's 256colres.pl // colors 16-231 are a 6x6x6 color cube private static final TerminalColor[] COL_RES_256 = new TerminalColor[240]; static { for (int red = 0; red < 6; ++red) { for (int green = 0; green < 6; ++green) { for (int blue = 0; blue < 6; ++blue) { int code = 36 * red + 6 * green + blue; COL_RES_256[code] = new TerminalColor(red > 0 ? (40 * red + 55) : 0, green > 0 ? (40 * green + 55) : 0, blue > 0 ? (40 * blue + 55) : 0); } } } // colors 232-255 are a grayscale ramp, intentionally leaving out // black and white for (int gray = 0; gray < 24; ++gray) { int level = 10 * gray + 8; COL_RES_256[216 + gray] = new TerminalColor(level, level, level); } } }
src-terminal/com/jediterm/terminal/emulator/ColorPalette.java
package com.jediterm.terminal.emulator; import com.jediterm.terminal.TerminalColor; import java.awt.*; /** * @author traff */ @SuppressWarnings("UseJBColor") public abstract class ColorPalette { public static final ColorPalette XTERM_PALETTE = new ColorPalette() { @Override public Color[] getIndexColors() { return new Color[]{ new Color(0x000000), //Black new Color(0xcd0000), //Red new Color(0x00cd00), //Green new Color(0xcdcd00), //Yellow new Color(0x1e90ff), //Blue new Color(0xcd00cd), //Magenta new Color(0x00cdcd), //Cyan new Color(0xe5e5e5), //White //Bright versions of the ISO colors new Color(0x4c4c4c), //Black new Color(0xff0000), //Red new Color(0x00ff00), //Green new Color(0xffff00), //Yellow new Color(0x4682b4), //Blue new Color(0xff00ff), //Magenta new Color(0x00ffff), //Cyan new Color(0xffffff), //White }; } }; public static final ColorPalette WINDOWS_PALETTE = new ColorPalette() { @Override public Color[] getIndexColors() { return new Color[]{ new Color(0x000000), //Black new Color(0x800000), //Red new Color(0x008000), //Green new Color(0x808000), //Yellow new Color(0x000080), //Blue new Color(0x800080), //Magenta new Color(0x008080), //Cyan new Color(0xc0c0c0), //White //Bright versions of the ISO colors new Color(0x808080), //Black new Color(0xff0000), //Red new Color(0x00ff00), //Green new Color(0xffff00), //Yellow new Color(0x4682b4), //Blue new Color(0xff00ff), //Magenta new Color(0x00ffff), //Cyan new Color(0xffffff), //White }; } }; public abstract Color[] getIndexColors(); public Color getColor(TerminalColor color) { if (color.isIndexed()) { return getIndexColors()[color.getIndex()]; } else { return color.toAwtColor(); } } public static TerminalColor getIndexedColor(int index) { return (index < 16) ? TerminalColor.index(index) : getXTerm256(index); } private static TerminalColor getXTerm256(int index) { return COL_RES_256[index - 16]; } //The code below is translation of xterm's 256colres.pl // colors 16-231 are a 6x6x6 color cube private static final TerminalColor[] COL_RES_256 = new TerminalColor[240]; static { for (int red = 0; red < 6; ++red) { for (int green = 0; green < 6; ++green) { for (int blue = 0; blue < 6; ++blue) { int code = 36 * red + 6 * green + blue; COL_RES_256[code] = new TerminalColor(red > 0 ? (40 * red + 55) : 0, green > 0 ? (40 * green + 55) : 0, blue > 0 ? (40 * blue + 55) : 0); } } } // colors 232-255 are a grayscale ramp, intentionally leaving out // black and white for (int gray = 0; gray < 24; ++gray) { int level = 10 * gray + 8; COL_RES_256[216 + gray] = new TerminalColor(level, level, level); } } }
Fix AIOOBE.
src-terminal/com/jediterm/terminal/emulator/ColorPalette.java
Fix AIOOBE.
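The "Fix AIOOBE." change above adds a range guard to getXTerm256 so that colour indices of 256 or more return null instead of indexing past the 240-entry COL_RES_256 table (indices below 16 are already handled as basic indexed colours). A minimal standalone sketch of the same guard pattern; the names (lookup256, PALETTE_SIZE) are made up for illustration and are not the jediterm classes.

// Illustrative bounds-checked lookup mirroring the getXTerm256 change above.
public class BoundsCheckSketch {
    private static final int BASIC_COLORS = 16;
    private static final int PALETTE_SIZE = 240; // xterm colours 16..255
    private static final int[] COLORS = new int[PALETTE_SIZE];

    // Returns -1 instead of throwing ArrayIndexOutOfBoundsException when the
    // requested index is outside the 16..255 range the table covers.
    static int lookup256(int index) {
        if (index < BASIC_COLORS || index >= BASIC_COLORS + PALETTE_SIZE) {
            return -1;
        }
        return COLORS[index - BASIC_COLORS];
    }

    public static void main(String[] args) {
        System.out.println(lookup256(300)); // -1 rather than an AIOOBE
    }
}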
Java
apache-2.0
ff6eeb311389f4e11f41b5ca09bf73a13191d21c
0
vanniktech/Emoji,vanniktech/Emoji,vanniktech/Emoji
package com.vanniktech.emoji; import android.content.Context; import android.text.Spannable; import com.vanniktech.emoji.emoji.Emoji; import java.util.ArrayList; import java.util.List; import static com.vanniktech.emoji.EmojiHandler.SpanRangeList.SPAN_NOT_FOUND; final class EmojiHandler { static void addEmojis(final Context context, final Spannable text, final int emojiSize) { final SpanRangeList existingSpanRanges = new SpanRangeList(text); final EmojiManager emojiManager = EmojiManager.getInstance(); int index = 0; while (index < text.length()) { final int existingSpanEnd = existingSpanRanges.spanEnd(index); if (existingSpanEnd == SPAN_NOT_FOUND) { final int nextSpanStart = existingSpanRanges.nextSpanStart(index); final int searchRange = nextSpanStart == SPAN_NOT_FOUND ? text.length() : nextSpanStart; final Emoji found = emojiManager.findEmoji(text.subSequence(index, searchRange)); if (found != null) { text.setSpan(new EmojiSpan(context, found.getResource(), emojiSize), index, index + found.getLength(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE); index += found.getLength(); } else { index++; } } else { index += existingSpanEnd - index; } } } private EmojiHandler() { throw new AssertionError("No instances."); } static final class SpanRangeList { static final int SPAN_NOT_FOUND = -1; private final List<Range> spanRanges = new ArrayList<>(); SpanRangeList(final Spannable text) { for (final EmojiSpan span : text.getSpans(0, text.length(), EmojiSpan.class)) { spanRanges.add(new Range(text.getSpanStart(span), text.getSpanEnd(span))); } } int spanEnd(final int index) { for (final Range spanRange : spanRanges) { if (spanRange.start == index) { return spanRange.end; } } return SPAN_NOT_FOUND; } int nextSpanStart(final int index) { for (final Range spanRange : spanRanges) { if (spanRange.start > index) { return spanRange.start; } } return SPAN_NOT_FOUND; } } static final class Range { final int start; final int end; Range(final int start, final int end) { this.start = start; this.end = end; } } }
emoji/src/main/java/com/vanniktech/emoji/EmojiHandler.java
package com.vanniktech.emoji; import android.content.Context; import android.text.Spannable; import com.vanniktech.emoji.emoji.Emoji; final class EmojiHandler { static void addEmojis(final Context context, final Spannable text, final int emojiSize) { final EmojiSpan[] spans = text.getSpans(0, text.length(), EmojiSpan.class); for (final EmojiSpan oldSpan : spans) { text.removeSpan(oldSpan); } int i = 0; final EmojiManager instance = EmojiManager.getInstance(); while (i < text.length()) { final Emoji found = instance.findEmoji(text.subSequence(i, text.length())); if (found != null) { text.setSpan(new EmojiSpan(context, found.getResource(), emojiSize), i, i + found.getLength(), Spannable.SPAN_INCLUSIVE_EXCLUSIVE); i += found.getLength(); } else { i++; } } } private EmojiHandler() { throw new AssertionError("No instances."); } }
Make the EmojiEditText and EmojiTextView more performant (#93) Thanks once again
emoji/src/main/java/com/vanniktech/emoji/EmojiHandler.java
Make the EmojiEditText and EmojiTextView more performant (#93)
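The performance change above keeps existing EmojiSpans in place and only runs emoji matching over the stretches of text that no span already covers, instead of removing every span and rescanning the whole CharSequence. A rough sketch of that skip-covered-ranges idea over plain integer ranges; the names (Range, uncoveredIndices) are invented for illustration and are not the library's SpanRangeList API.

import java.util.*;

// Rough sketch of scanning only the positions that are not already covered by
// an existing span, the idea behind the SpanRangeList change above.
public class SkipCoveredSketch {
    static final class Range {
        final int start, end; // half-open interval [start, end)
        Range(int start, int end) { this.start = start; this.end = end; }
    }

    // Returns the indices that still need emoji matching, jumping over covered ranges.
    static List<Integer> uncoveredIndices(int length, List<Range> covered) {
        List<Integer> result = new ArrayList<>();
        int index = 0;
        while (index < length) {
            Range hit = null;
            for (Range r : covered) {
                if (r.start == index) { hit = r; break; }
            }
            if (hit != null) {
                index = hit.end;       // jump over the existing span
            } else {
                result.add(index++);   // this position still needs matching
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // Text of length 10 with an existing span over [2, 5): only 0, 1 and 5..9 are rescanned.
        System.out.println(uncoveredIndices(10, Arrays.asList(new Range(2, 5))));
    }
}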
Java
apache-2.0
fc25c7b408ca9697b18d5f28da3a4f513cfbe7f4
0
pantsbuild/ivy,pantsbuild/ivy
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy.plugins.resolver; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URL; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.ivy.core.IvyContext; import org.apache.ivy.core.IvyPatternHelper; import org.apache.ivy.core.LogOptions; import org.apache.ivy.core.cache.ModuleDescriptorWriter; import org.apache.ivy.core.cache.RepositoryCacheManager; import org.apache.ivy.core.module.descriptor.Artifact; import org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor; import org.apache.ivy.core.module.descriptor.DependencyDescriptor; import org.apache.ivy.core.module.descriptor.ModuleDescriptor; import org.apache.ivy.core.module.id.ModuleId; import org.apache.ivy.core.module.id.ModuleRevisionId; import org.apache.ivy.core.report.ArtifactDownloadReport; import org.apache.ivy.core.report.DownloadReport; import org.apache.ivy.core.report.DownloadStatus; import org.apache.ivy.core.report.MetadataArtifactDownloadReport; import org.apache.ivy.core.resolve.DownloadOptions; import org.apache.ivy.core.resolve.IvyNode; import org.apache.ivy.core.resolve.ResolveData; import org.apache.ivy.core.resolve.ResolvedModuleRevision; import org.apache.ivy.core.search.ModuleEntry; import org.apache.ivy.core.search.OrganisationEntry; import org.apache.ivy.core.search.RevisionEntry; import org.apache.ivy.plugins.parser.ModuleDescriptorParser; import org.apache.ivy.plugins.parser.ModuleDescriptorParserRegistry; import org.apache.ivy.plugins.parser.xml.XmlModuleDescriptorWriter; import org.apache.ivy.plugins.repository.ArtifactResourceResolver; import org.apache.ivy.plugins.repository.Resource; import org.apache.ivy.plugins.repository.ResourceDownloader; import org.apache.ivy.plugins.repository.url.URLRepository; import org.apache.ivy.plugins.repository.url.URLResource; import org.apache.ivy.plugins.resolver.util.MDResolvedResource; import org.apache.ivy.plugins.resolver.util.ResolvedResource; import org.apache.ivy.plugins.resolver.util.ResourceMDParser; import org.apache.ivy.util.ChecksumHelper; import org.apache.ivy.util.HostUtil; import org.apache.ivy.util.Message; /** * */ public abstract class BasicResolver extends AbstractResolver { /** * Exception thrown internally in getDependency to indicate a dependency is unresolved. 
* <p> * Due to the contract of getDependency, this exception is never thrown publicly, but rather * converted in a message (either error or verbose) and returning null * </p> */ private static class UnresolvedDependencyException extends RuntimeException { private boolean error; /** * Dependency has not been resolved. * This is not an error and won't log any message. */ public UnresolvedDependencyException() { this("", false); } /** * Dependency has not been resolved. * This is an error and will log a message. */ public UnresolvedDependencyException(String message) { this(message, true); } /** * Dependency has not been resolved. * The boolean tells if it is an error or not, a message will be logged if non empty. */ public UnresolvedDependencyException(String message, boolean error) { super(message); this.error = error; } public boolean isError() { return error; } } public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss"); private String workspaceName; /** * True if the files resolved are dependent of the environment from which they have been * resolved, false otherwise. In general, relative paths are dependent of the environment, and * absolute paths including machine reference are not. */ private boolean envDependent = true; private List ivyattempts = new ArrayList(); private Map artattempts = new HashMap(); private boolean checkconsistency = true; private boolean allownomd = true; private String checksums = null; private URLRepository extartifactrep = new URLRepository(); // used only to download // external artifacts public BasicResolver() { workspaceName = HostUtil.getLocalHostName(); } public String getWorkspaceName() { return workspaceName; } public void setWorkspaceName(String workspaceName) { this.workspaceName = workspaceName; } public boolean isEnvDependent() { return envDependent; } public void setEnvDependent(boolean envDependent) { this.envDependent = envDependent; } public ResolvedModuleRevision getDependency(DependencyDescriptor dde, ResolveData data) throws ParseException { IvyContext context = IvyContext.pushNewCopyContext(); DependencyDescriptor systemDd = dde; DependencyDescriptor nsDd = fromSystem(dde); context.setDependencyDescriptor(systemDd); context.setResolveData(data); try { clearIvyAttempts(); clearArtifactAttempts(); ModuleRevisionId systemMrid = systemDd.getDependencyRevisionId(); ModuleRevisionId nsMrid = nsDd.getDependencyRevisionId(); checkRevision(systemMrid); boolean isDynamic = getAndCheckIsDynamic(systemMrid); // we first search for the dependency in cache ResolvedModuleRevision rmr = null; rmr = findModuleInCache(systemDd, data); if (rmr != null) { if (rmr.getDescriptor().isDefault() && rmr.getResolver() != this) { Message.verbose("\t" + getName() + ": found revision in cache: " + systemMrid + " (resolved by " + rmr.getResolver().getName() + "): but it's a default one, maybe we can find a better one"); } else { Message.verbose("\t" + getName() + ": revision in cache: " + systemMrid); return rmr; } } checkInterrupted(); ResolvedResource ivyRef = findIvyFileRef(nsDd, data); checkInterrupted(); // get module descriptor ModuleDescriptor nsMd; ModuleDescriptor systemMd = null; if (ivyRef == null) { if (!isAllownomd()) { throw new UnresolvedDependencyException( "\t" + getName() + ": no ivy file found for " + systemMrid, false); } nsMd = DefaultModuleDescriptor.newDefaultInstance(nsMrid, nsDd .getAllDependencyArtifacts()); ResolvedResource artifactRef = findFirstArtifactRef(nsMd, nsDd, data); checkInterrupted(); if (artifactRef 
== null) { throw new UnresolvedDependencyException("\t" + getName() + ": no ivy file nor artifact found for " + systemMrid, false); } else { long lastModified = artifactRef.getLastModified(); if (lastModified != 0 && nsMd instanceof DefaultModuleDescriptor) { ((DefaultModuleDescriptor) nsMd).setLastModified(lastModified); } Message.verbose("\t" + getName() + ": no ivy file found for " + systemMrid + ": using default data"); if (isDynamic) { nsMd.setResolvedModuleRevisionId(ModuleRevisionId.newInstance(nsMrid, artifactRef.getRevision())); } systemMd = toSystem(nsMd); MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(systemMd.getMetadataArtifact()); madr.setDownloadStatus(DownloadStatus.NO); madr.setSearched(true); rmr = new ResolvedModuleRevision(this, this, systemMd, madr); } } else { if (ivyRef instanceof MDResolvedResource) { rmr = ((MDResolvedResource) ivyRef).getResolvedModuleRevision(); } if (rmr == null) { rmr = parse(ivyRef, systemDd, data); if (rmr == null) { throw new UnresolvedDependencyException(); } } if (!rmr.getReport().isDownloaded()) { return toSystem(rmr); } else { nsMd = rmr.getDescriptor(); // check descriptor data is in sync with resource revision and names systemMd = toSystem(nsMd); if (checkconsistency) { checkDescriptorConsistency(systemMrid, systemMd, ivyRef); checkDescriptorConsistency(nsMrid, nsMd, ivyRef); } else { if (systemMd instanceof DefaultModuleDescriptor) { String revision = getRevision(ivyRef, systemMrid, systemMd); ((DefaultModuleDescriptor) systemMd).setModuleRevisionId( ModuleRevisionId.newInstance(systemMrid, revision)); } else { Message.warn( "consistency disabled with instance of non DefaultModuleDescriptor..." + " module info can't be updated, so consistency check will be done"); checkDescriptorConsistency(nsMrid, nsMd, ivyRef); checkDescriptorConsistency(systemMrid, systemMd, ivyRef); } } MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(systemMd.getMetadataArtifact()); madr.setDownloadStatus(rmr.getReport().getDownloadStatus()); madr.setDownloadDetails(rmr.getReport().getDownloadDetails()); madr.setArtifactOrigin(rmr.getReport().getArtifactOrigin()); madr.setDownloadTimeMillis(rmr.getReport().getDownloadTimeMillis()); madr.setSize(rmr.getReport().getSize()); madr.setOriginalLocalFile(rmr.getReport().getOriginalLocalFile()); madr.setSearched(true); rmr = new ResolvedModuleRevision(this, this, systemMd, madr); } } resolveAndCheckRevision(systemMd, systemMrid, ivyRef, isDynamic); resolveAndCheckPublicationDate(systemDd, systemMd, systemMrid, data); checkNotConvertedExclusionRule(systemMd, ivyRef, data); cacheModuleDescriptor(systemMd, systemMrid, ivyRef, rmr); return rmr; } catch (UnresolvedDependencyException ex) { if (ex.getMessage().length() > 0) { if (ex.isError()) { Message.error(ex.getMessage()); } else { Message.verbose(ex.getMessage()); } } return null; } finally { IvyContext.popContext(); } } private void cacheModuleDescriptor(ModuleDescriptor systemMd, ModuleRevisionId systemMrid, ResolvedResource ivyRef, ResolvedModuleRevision rmr) { RepositoryCacheManager cacheManager = getRepositoryCacheManager(); final ModuleDescriptorParser parser = systemMd.getParser(); // the metadata artifact which was used to cache the original metadata file Artifact requestedMetadataArtifact = ivyRef == null ? 
systemMd.getMetadataArtifact() : parser.getMetadataArtifact( ModuleRevisionId.newInstance(systemMrid, ivyRef.getRevision()), ivyRef.getResource()); cacheManager.originalToCachedModuleDescriptor(this, ivyRef, requestedMetadataArtifact, rmr, new ModuleDescriptorWriter() { public void write(ResolvedResource originalMdResource, ModuleDescriptor md, File src, File dest) throws IOException, ParseException { if (originalMdResource == null) { // a basic ivy file is written containing default data XmlModuleDescriptorWriter.write(md, dest); } else { // copy and update ivy file from source to cache parser.toIvyFile( new FileInputStream(src), originalMdResource.getResource(), dest, md); long repLastModified = originalMdResource.getLastModified(); if (repLastModified > 0) { dest.setLastModified(repLastModified); } } } }); } private void checkNotConvertedExclusionRule(ModuleDescriptor systemMd, ResolvedResource ivyRef, ResolveData data) { if (!systemMd.isDefault() && data.getSettings().logNotConvertedExclusionRule() && systemMd instanceof DefaultModuleDescriptor) { DefaultModuleDescriptor dmd = (DefaultModuleDescriptor) systemMd; if (dmd.isNamespaceUseful()) { Message.warn( "the module descriptor " + ivyRef.getResource() + " has information which can't be converted into " + "the system namespace. " + "It will require the availability of the namespace '" + getNamespace().getName() + "' to be fully usable."); } } } private void resolveAndCheckPublicationDate(DependencyDescriptor systemDd, ModuleDescriptor systemMd, ModuleRevisionId systemMrid, ResolveData data) { // resolve and check publication date if (data.getDate() != null) { long pubDate = getPublicationDate(systemMd, systemDd, data); if (pubDate > data.getDate().getTime()) { throw new UnresolvedDependencyException( "\t" + getName() + ": unacceptable publication date => was=" + new Date(pubDate) + " required=" + data.getDate()); } else if (pubDate == -1) { throw new UnresolvedDependencyException("\t" + getName() + ": impossible to guess publication date: artifact missing for " + systemMrid); } systemMd.setResolvedPublicationDate(new Date(pubDate)); } } private void checkModuleDescriptorRevision(ModuleDescriptor systemMd, ModuleRevisionId systemMrid) { if (!getSettings().getVersionMatcher().accept(systemMrid, systemMd)) { throw new UnresolvedDependencyException( "\t" + getName() + ": unacceptable revision => was=" + systemMd.getModuleRevisionId().getRevision() + " required=" + systemMrid.getRevision()); } } private boolean getAndCheckIsDynamic(ModuleRevisionId systemMrid) { boolean isDynamic = getSettings().getVersionMatcher().isDynamic(systemMrid); if (isDynamic && !acceptLatest()) { throw new UnresolvedDependencyException( "dynamic revisions not handled by " + getClass().getName() + ". 
impossible to resolve " + systemMrid); } return isDynamic; } private void checkRevision(ModuleRevisionId systemMrid) { // check revision int index = systemMrid.getRevision().indexOf("@"); if (index != -1 && !systemMrid.getRevision().substring(index + 1).equals(workspaceName)) { throw new UnresolvedDependencyException("\t" + getName() + ": unhandled revision => " + systemMrid.getRevision()); } } private void resolveAndCheckRevision(ModuleDescriptor systemMd, ModuleRevisionId dependencyConstraint, ResolvedResource ivyRef, boolean isDynamic) { // we get the resolved module revision id from the descriptor: it may contain extra // attributes that were not included in the dependency constraint ModuleRevisionId resolvedMrid = systemMd.getResolvedModuleRevisionId(); if (resolvedMrid.getRevision() == null || resolvedMrid.getRevision().length() == 0) { if (!isDynamic) { resolvedMrid = ModuleRevisionId.newInstance( resolvedMrid, dependencyConstraint.getRevision()); } else if (ivyRef.getRevision() == null || ivyRef.getRevision().length() == 0) { resolvedMrid = ModuleRevisionId.newInstance(resolvedMrid, "working@" + getName()); } else { resolvedMrid = ModuleRevisionId.newInstance(resolvedMrid, ivyRef .getRevision()); } } if (isDynamic) { Message.verbose("\t\t[" + toSystem(resolvedMrid).getRevision() + "] " + dependencyConstraint.getModuleId()); } systemMd.setResolvedModuleRevisionId(resolvedMrid); checkModuleDescriptorRevision(systemMd, dependencyConstraint); } private String getRevision(ResolvedResource ivyRef, ModuleRevisionId askedMrid, ModuleDescriptor md) throws ParseException { String revision = ivyRef.getRevision(); if (revision == null) { Message.debug("no revision found in reference for " + askedMrid); if (getSettings().getVersionMatcher().isDynamic(askedMrid)) { if (md.getModuleRevisionId().getRevision() == null) { return "working@" + getName(); } else { Message.debug("using " + askedMrid); revision = md.getModuleRevisionId().getRevision(); } } else { Message.debug("using " + askedMrid); revision = askedMrid.getRevision(); } } return revision; } public ResolvedModuleRevision parse(final ResolvedResource mdRef, DependencyDescriptor dd, ResolveData data) throws ParseException { DependencyDescriptor nsDd = dd; dd = toSystem(nsDd); ModuleRevisionId mrid = dd.getDependencyRevisionId(); ModuleDescriptorParser parser = ModuleDescriptorParserRegistry .getInstance().getParser(mdRef.getResource()); if (parser == null) { Message.warn("no module descriptor parser available for " + mdRef.getResource()); return null; } Message.verbose("\t" + getName() + ": found md file for " + mrid); Message.verbose("\t\t=> " + mdRef); Message.debug("\tparser = " + parser); ModuleRevisionId resolvedMrid = mrid; // first check if this dependency has not yet been resolved if (getSettings().getVersionMatcher().isDynamic(mrid)) { resolvedMrid = ModuleRevisionId.newInstance(mrid, mdRef.getRevision()); IvyNode node = data.getNode(resolvedMrid); if (node != null && node.getModuleRevision() != null) { // this revision has already be resolved : return it if (node.getDescriptor() != null && node.getDescriptor().isDefault()) { Message.verbose("\t" + getName() + ": found already resolved revision: " + resolvedMrid + ": but it's a default one, maybe we can find a better one"); } else { Message.verbose("\t" + getName() + ": revision already resolved: " + resolvedMrid); node.getModuleRevision().getReport().setSearched(true); return node.getModuleRevision(); } } } Artifact moduleArtifact = parser.getMetadataArtifact(resolvedMrid, 
mdRef.getResource()); return getRepositoryCacheManager().cacheModuleDescriptor( this, mdRef, dd, moduleArtifact, downloader, getCacheOptions(data)); } protected ResourceMDParser getRMDParser(final DependencyDescriptor dd, final ResolveData data) { return new ResourceMDParser() { public MDResolvedResource parse(Resource resource, String rev) { try { ResolvedModuleRevision rmr = BasicResolver.this.parse(new ResolvedResource( resource, rev), dd, data); if (rmr == null) { return null; } else { return new MDResolvedResource(resource, rev, rmr); } } catch (ParseException e) { Message.warn("Failed to parse the file '" + resource + "': " + e.getMessage()); return null; } } }; } protected ResourceMDParser getDefaultRMDParser(final ModuleId mid) { return new ResourceMDParser() { public MDResolvedResource parse(Resource resource, String rev) { DefaultModuleDescriptor md = DefaultModuleDescriptor.newDefaultInstance(new ModuleRevisionId(mid, rev)); MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(md.getMetadataArtifact()); madr.setDownloadStatus(DownloadStatus.NO); madr.setSearched(true); return new MDResolvedResource(resource, rev, new ResolvedModuleRevision( BasicResolver.this, BasicResolver.this, md, madr)); } }; } // private boolean isResolved(ResolveData data, ModuleRevisionId mrid) { // IvyNode node = getSystemNode(data, mrid); // return node != null && node.getModuleRevision() != null; // } // private void checkDescriptorConsistency(ModuleRevisionId mrid, ModuleDescriptor md, ResolvedResource ivyRef) throws ParseException { boolean ok = true; StringBuffer errors = new StringBuffer(); if (!mrid.getOrganisation().equals(md.getModuleRevisionId().getOrganisation())) { Message.error("\t" + getName() + ": bad organisation found in " + ivyRef.getResource() + ": expected='" + mrid.getOrganisation() + "' found='" + md.getModuleRevisionId().getOrganisation() + "'"); errors.append("bad organisation: expected='" + mrid.getOrganisation() + "' found='" + md.getModuleRevisionId().getOrganisation() + "'; "); ok = false; } if (!mrid.getName().equals(md.getModuleRevisionId().getName())) { Message.error("\t" + getName() + ": bad module name found in " + ivyRef.getResource() + ": expected='" + mrid.getName() + " found='" + md.getModuleRevisionId().getName() + "'"); errors.append("bad module name: expected='" + mrid.getName() + "' found='" + md.getModuleRevisionId().getName() + "'; "); ok = false; } if (ivyRef.getRevision() != null && !ivyRef.getRevision().startsWith("working@")) { ModuleRevisionId expectedMrid = ModuleRevisionId .newInstance(mrid, ivyRef.getRevision()); if (!getSettings().getVersionMatcher().accept(expectedMrid, md)) { Message.error("\t" + getName() + ": bad revision found in " + ivyRef.getResource() + ": expected='" + ivyRef.getRevision() + " found='" + md.getModuleRevisionId().getRevision() + "'"); errors.append("bad revision: expected='" + ivyRef.getRevision() + "' found='" + md.getModuleRevisionId().getRevision() + "'; "); ok = false; } } if (!getSettings().getStatusManager().isStatus(md.getStatus())) { Message.error("\t" + getName() + ": bad status found in " + ivyRef.getResource() + ": '" + md.getStatus() + "'"); errors.append("bad status: '" + md.getStatus() + "'; "); ok = false; } if (!ok) { throw new ParseException("inconsistent module descriptor file found in '" + ivyRef.getResource() + "': " + errors, 0); } } protected void clearIvyAttempts() { ivyattempts.clear(); clearArtifactAttempts(); } protected void logIvyAttempt(String attempt) { ivyattempts.add(attempt); 
Message.verbose("\t\ttried " + attempt); } protected void logArtifactAttempt(Artifact art, String attempt) { List attempts = (List) artattempts.get(art); if (attempts == null) { attempts = new ArrayList(); artattempts.put(art, attempts); } attempts.add(attempt); Message.verbose("\t\ttried " + attempt); } protected void logAttempt(String attempt) { Artifact currentArtifact = (Artifact) IvyContext.getContext().get(getName() + ".artifact"); if (currentArtifact != null) { logArtifactAttempt(currentArtifact, attempt); } else { logIvyAttempt(attempt); } } public void reportFailure() { Message.warn("==== " + getName() + ": tried"); for (ListIterator iter = ivyattempts.listIterator(); iter.hasNext();) { String m = (String) iter.next(); Message.warn(" " + m); } for (Iterator iter = artattempts.keySet().iterator(); iter.hasNext();) { Artifact art = (Artifact) iter.next(); List attempts = (List) artattempts.get(art); if (attempts != null) { Message.warn(" -- artifact " + art + ":"); for (ListIterator iterator = attempts.listIterator(); iterator.hasNext();) { String m = (String) iterator.next(); Message.warn(" " + m); } } } } public void reportFailure(Artifact art) { Message.warn("==== " + getName() + ": tried"); List attempts = (List) artattempts.get(art); if (attempts != null) { for (ListIterator iter = attempts.listIterator(); iter.hasNext();) { String m = (String) iter.next(); Message.warn(" " + m); } } } protected boolean acceptLatest() { return true; } public DownloadReport download(Artifact[] artifacts, DownloadOptions options) { RepositoryCacheManager cacheManager = getRepositoryCacheManager(); clearArtifactAttempts(); DownloadReport dr = new DownloadReport(); for (int i = 0; i < artifacts.length; i++) { ArtifactDownloadReport adr = cacheManager.download( artifacts[i], artifactResourceResolver, downloader, getCacheDownloadOptions(options)); if (DownloadStatus.FAILED == adr.getDownloadStatus()) { if (!ArtifactDownloadReport.MISSING_ARTIFACT.equals(adr.getDownloadDetails())) { Message.warn("\t" + adr); } } else if (DownloadStatus.NO == adr.getDownloadStatus()) { Message.verbose("\t" + adr); } else if (LogOptions.LOG_QUIET.equals(options.getLog())) { Message.verbose("\t" + adr); } else { Message.info("\t" + adr); } dr.addArtifactReport(adr); checkInterrupted(); } return dr; } protected void clearArtifactAttempts() { artattempts.clear(); } public boolean exists(Artifact artifact) { ResolvedResource artifactRef = getArtifactRef(artifact, null); if (artifactRef != null) { return artifactRef.getResource().exists(); } return false; } protected long getPublicationDate(ModuleDescriptor md, DependencyDescriptor dd, ResolveData data) { if (md.getPublicationDate() != null) { return md.getPublicationDate().getTime(); } ResolvedResource artifactRef = findFirstArtifactRef(md, dd, data); if (artifactRef != null) { return artifactRef.getLastModified(); } return -1; } public String toString() { return getName(); } public String[] listTokenValues(String token, Map otherTokenValues) { Collection ret = findNames(otherTokenValues, token); return (String[]) ret.toArray(new String[ret.size()]); } public OrganisationEntry[] listOrganisations() { Collection names = findNames(Collections.EMPTY_MAP, IvyPatternHelper.ORGANISATION_KEY); OrganisationEntry[] ret = new OrganisationEntry[names.size()]; int i = 0; for (Iterator iter = names.iterator(); iter.hasNext(); i++) { String org = (String) iter.next(); ret[i] = new OrganisationEntry(this, org); } return ret; } public ModuleEntry[] listModules(OrganisationEntry org) { Map 
tokenValues = new HashMap(); tokenValues.put(IvyPatternHelper.ORGANISATION_KEY, org.getOrganisation()); Collection names = findNames(tokenValues, IvyPatternHelper.MODULE_KEY); ModuleEntry[] ret = new ModuleEntry[names.size()]; int i = 0; for (Iterator iter = names.iterator(); iter.hasNext(); i++) { String name = (String) iter.next(); ret[i] = new ModuleEntry(org, name); } return ret; } public RevisionEntry[] listRevisions(ModuleEntry mod) { Map tokenValues = new HashMap(); tokenValues.put(IvyPatternHelper.ORGANISATION_KEY, mod.getOrganisation()); tokenValues.put(IvyPatternHelper.MODULE_KEY, mod.getModule()); Collection names = findNames(tokenValues, IvyPatternHelper.REVISION_KEY); RevisionEntry[] ret = new RevisionEntry[names.size()]; int i = 0; for (Iterator iter = names.iterator(); iter.hasNext(); i++) { String name = (String) iter.next(); ret[i] = new RevisionEntry(mod, name); } return ret; } protected abstract Collection findNames(Map tokenValues, String token); protected ResolvedResource findFirstArtifactRef(ModuleDescriptor md, DependencyDescriptor dd, ResolveData data) { ResolvedResource ret = null; String[] conf = md.getConfigurationsNames(); for (int i = 0; i < conf.length; i++) { Artifact[] artifacts = md.getArtifacts(conf[i]); for (int j = 0; j < artifacts.length; j++) { ret = getArtifactRef(artifacts[j], data.getDate()); if (ret != null) { return ret; } } } return null; } protected long getAndCheck(Resource resource, File dest) throws IOException { long size = get(resource, dest); String[] checksums = getChecksumAlgorithms(); boolean checked = false; for (int i = 0; i < checksums.length && !checked; i++) { checked = check(resource, dest, checksums[i]); } return size; } /** * Checks the given resource checksum if a checksum resource exists. * * @param resource * the resource to check * @param dest * the file where the resource has been downloaded * @param algorithm * the checksum algorithm to use * @return true if the checksum has been successfully checked, false if the checksum wasn't * available * @throws IOException * if a checksum exist but do not match the downloaded file checksum */ private boolean check(Resource resource, File dest, String algorithm) throws IOException { Resource csRes = resource.clone(resource.getName() + "." 
+ algorithm); if (csRes.exists()) { Message.debug(algorithm + " file found for " + resource + ": checking..."); File csFile = File.createTempFile("ivytmp", algorithm); try { get(csRes, csFile); try { ChecksumHelper.check(dest, csFile, algorithm); Message.verbose(algorithm + " OK for " + resource); return true; } catch (IOException ex) { dest.delete(); throw ex; } } finally { csFile.delete(); } } else { return false; } } protected ResolvedResource getArtifactRef(Artifact artifact, Date date) { IvyContext.getContext().set(getName() + ".artifact", artifact); try { ResolvedResource ret = findArtifactRef(artifact, date); if (ret == null && artifact.getUrl() != null) { URL url = artifact.getUrl(); Message.verbose("\tusing url for " + artifact + ": " + url); logArtifactAttempt(artifact, url.toExternalForm()); ret = new ResolvedResource(new URLResource(url), artifact.getModuleRevisionId() .getRevision()); } return ret; } finally { IvyContext.getContext().set(getName() + ".artifact", null); } } protected abstract ResolvedResource findArtifactRef(Artifact artifact, Date date); protected abstract long get(Resource resource, File dest) throws IOException; public boolean isCheckconsistency() { return checkconsistency; } public void setCheckconsistency(boolean checkConsitency) { checkconsistency = checkConsitency; } public boolean isAllownomd() { return allownomd; } public void setAllownomd(boolean b) { allownomd = b; } public String[] getChecksumAlgorithms() { String csDef = checksums == null ? getSettings().getVariable("ivy.checksums") : checksums; if (csDef == null) { return new String[0]; } // csDef is a comma separated list of checksum algorithms to use with this resolver // we parse and return it as a String[] String[] checksums = csDef.split(","); List algos = new ArrayList(); for (int i = 0; i < checksums.length; i++) { String cs = checksums[i].trim(); if (!"".equals(cs) && !"none".equals(cs)) { algos.add(cs); } } return (String[]) algos.toArray(new String[algos.size()]); } public void setChecksums(String checksums) { this.checksums = checksums; } private final ArtifactResourceResolver artifactResourceResolver = new ArtifactResourceResolver() { public ResolvedResource resolve(Artifact artifact) { artifact = fromSystem(artifact); return getArtifactRef(artifact, null); } }; private final ResourceDownloader downloader = new ResourceDownloader() { public void download(Artifact artifact, Resource resource, File dest) throws IOException { if (dest.exists()) { dest.delete(); } File part = new File(dest.getAbsolutePath() + ".part"); if (resource.getName().equals( String.valueOf(artifact.getUrl()))) { if (part.getParentFile() != null) { part.getParentFile().mkdirs(); } extartifactrep.get(resource.getName(), part); } else { getAndCheck(resource, part); } if (!part.renameTo(dest)) { throw new IOException( "impossible to move part file to definitive one: " + part + " -> " + dest); } } }; }
src/java/org/apache/ivy/plugins/resolver/BasicResolver.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy.plugins.resolver; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URL; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.ivy.core.IvyContext; import org.apache.ivy.core.IvyPatternHelper; import org.apache.ivy.core.LogOptions; import org.apache.ivy.core.cache.ModuleDescriptorWriter; import org.apache.ivy.core.cache.RepositoryCacheManager; import org.apache.ivy.core.module.descriptor.Artifact; import org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor; import org.apache.ivy.core.module.descriptor.DependencyDescriptor; import org.apache.ivy.core.module.descriptor.ModuleDescriptor; import org.apache.ivy.core.module.id.ModuleId; import org.apache.ivy.core.module.id.ModuleRevisionId; import org.apache.ivy.core.report.ArtifactDownloadReport; import org.apache.ivy.core.report.DownloadReport; import org.apache.ivy.core.report.DownloadStatus; import org.apache.ivy.core.report.MetadataArtifactDownloadReport; import org.apache.ivy.core.resolve.DownloadOptions; import org.apache.ivy.core.resolve.IvyNode; import org.apache.ivy.core.resolve.ResolveData; import org.apache.ivy.core.resolve.ResolvedModuleRevision; import org.apache.ivy.core.search.ModuleEntry; import org.apache.ivy.core.search.OrganisationEntry; import org.apache.ivy.core.search.RevisionEntry; import org.apache.ivy.plugins.parser.ModuleDescriptorParser; import org.apache.ivy.plugins.parser.ModuleDescriptorParserRegistry; import org.apache.ivy.plugins.parser.xml.XmlModuleDescriptorWriter; import org.apache.ivy.plugins.repository.ArtifactResourceResolver; import org.apache.ivy.plugins.repository.Resource; import org.apache.ivy.plugins.repository.ResourceDownloader; import org.apache.ivy.plugins.repository.url.URLRepository; import org.apache.ivy.plugins.repository.url.URLResource; import org.apache.ivy.plugins.resolver.util.MDResolvedResource; import org.apache.ivy.plugins.resolver.util.ResolvedResource; import org.apache.ivy.plugins.resolver.util.ResourceMDParser; import org.apache.ivy.util.ChecksumHelper; import org.apache.ivy.util.HostUtil; import org.apache.ivy.util.Message; /** * */ public abstract class BasicResolver extends AbstractResolver { /** * Exception thrown internally in getDependency to indicate a dependency is unresolved. 
* <p> * Due to the contract of getDependency, this exception is never thrown publicly, but rather * converted in a message (either error or verbose) and returning null * </p> */ private static class UnresolvedDependencyException extends RuntimeException { private boolean error; /** * Dependency has not been resolved. * This is not an error and won't log any message. */ public UnresolvedDependencyException() { this("", false); } /** * Dependency has not been resolved. * This is an error and will log a message. */ public UnresolvedDependencyException(String message) { this(message, true); } /** * Dependency has not been resolved. * The boolean tells if it is an error or not, a message will be logged if non empty. */ public UnresolvedDependencyException(String message, boolean error) { super(message); this.error = error; } public boolean isError() { return error; } } public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss"); private String workspaceName; /** * True if the files resolved are dependent of the environment from which they have been * resolved, false otherwise. In general, relative paths are dependent of the environment, and * absolute paths including machine reference are not. */ private boolean envDependent = true; private List ivyattempts = new ArrayList(); private Map artattempts = new HashMap(); private boolean checkconsistency = true; private boolean allownomd = true; private String checksums = null; private URLRepository extartifactrep = new URLRepository(); // used only to download // external artifacts public BasicResolver() { workspaceName = HostUtil.getLocalHostName(); } public String getWorkspaceName() { return workspaceName; } public void setWorkspaceName(String workspaceName) { this.workspaceName = workspaceName; } public boolean isEnvDependent() { return envDependent; } public void setEnvDependent(boolean envDependent) { this.envDependent = envDependent; } public ResolvedModuleRevision getDependency(DependencyDescriptor dde, ResolveData data) throws ParseException { IvyContext context = IvyContext.pushNewCopyContext(); DependencyDescriptor systemDd = dde; DependencyDescriptor nsDd = fromSystem(dde); context.setDependencyDescriptor(systemDd); context.setResolveData(data); try { clearIvyAttempts(); clearArtifactAttempts(); ModuleRevisionId systemMrid = systemDd.getDependencyRevisionId(); ModuleRevisionId nsMrid = nsDd.getDependencyRevisionId(); checkRevision(systemMrid); boolean isDynamic = getAndCheckIsDynamic(systemMrid); // we first search for the dependency in cache ResolvedModuleRevision rmr = null; rmr = findModuleInCache(systemDd, data); if (rmr != null) { if (rmr.getDescriptor().isDefault() && rmr.getResolver() != this) { Message.verbose("\t" + getName() + ": found revision in cache: " + systemMrid + " (resolved by " + rmr.getResolver().getName() + "): but it's a default one, maybe we can find a better one"); } else { Message.verbose("\t" + getName() + ": revision in cache: " + systemMrid); return rmr; } } checkInterrupted(); ResolvedResource ivyRef = findIvyFileRef(nsDd, data); checkInterrupted(); // get module descriptor ModuleDescriptor nsMd; ModuleDescriptor systemMd = null; if (ivyRef == null) { if (!isAllownomd()) { throw new UnresolvedDependencyException( "\t" + getName() + ": no ivy file found for " + systemMrid, false); } nsMd = DefaultModuleDescriptor.newDefaultInstance(nsMrid, nsDd .getAllDependencyArtifacts()); ResolvedResource artifactRef = findFirstArtifactRef(nsMd, nsDd, data); checkInterrupted(); if (artifactRef 
== null) { throw new UnresolvedDependencyException("\t" + getName() + ": no ivy file nor artifact found for " + systemMrid, false); } else { long lastModified = artifactRef.getLastModified(); if (lastModified != 0 && nsMd instanceof DefaultModuleDescriptor) { ((DefaultModuleDescriptor) nsMd).setLastModified(lastModified); } Message.verbose("\t" + getName() + ": no ivy file found for " + systemMrid + ": using default data"); if (isDynamic) { nsMd.setResolvedModuleRevisionId(ModuleRevisionId.newInstance(nsMrid, artifactRef.getRevision())); } systemMd = toSystem(nsMd); MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(systemMd.getMetadataArtifact()); madr.setDownloadStatus(DownloadStatus.NO); madr.setSearched(true); rmr = new ResolvedModuleRevision(this, this, systemMd, madr); } } else { if (ivyRef instanceof MDResolvedResource) { rmr = ((MDResolvedResource) ivyRef).getResolvedModuleRevision(); } if (rmr == null) { rmr = parse(ivyRef, systemDd, data); if (rmr == null) { throw new UnresolvedDependencyException(); } } if (!rmr.getReport().isDownloaded()) { return toSystem(rmr); } else { nsMd = rmr.getDescriptor(); // check descriptor data is in sync with resource revision and names systemMd = toSystem(nsMd); if (checkconsistency) { checkDescriptorConsistency(systemMrid, systemMd, ivyRef); checkDescriptorConsistency(nsMrid, nsMd, ivyRef); } else { if (systemMd instanceof DefaultModuleDescriptor) { String revision = getRevision(ivyRef, systemMrid, systemMd); ((DefaultModuleDescriptor) systemMd).setModuleRevisionId( ModuleRevisionId.newInstance(systemMrid, revision)); } else { Message.warn( "consistency disabled with instance of non DefaultModuleDescriptor..." + " module info can't be updated, so consistency check will be done"); checkDescriptorConsistency(nsMrid, nsMd, ivyRef); checkDescriptorConsistency(systemMrid, systemMd, ivyRef); } } MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(systemMd.getMetadataArtifact()); madr.setDownloadStatus(rmr.getReport().getDownloadStatus()); madr.setDownloadDetails(rmr.getReport().getDownloadDetails()); madr.setArtifactOrigin(rmr.getReport().getArtifactOrigin()); madr.setDownloadTimeMillis(rmr.getReport().getDownloadTimeMillis()); madr.setSize(rmr.getReport().getSize()); madr.setOriginalLocalFile(rmr.getReport().getOriginalLocalFile()); madr.setSearched(true); rmr = new ResolvedModuleRevision(this, this, systemMd, madr); } } resolveAndCheckRevision(systemMd, systemMrid, ivyRef, isDynamic); resolveAndCheckPublicationDate(systemDd, systemMd, systemMrid, data); checkNotConvertedExclusionRule(systemMd, ivyRef, data); cacheModuleDescriptor(systemMd, systemMrid, ivyRef, rmr); return rmr; } catch (UnresolvedDependencyException ex) { if (ex.getMessage().length() > 0) { if (ex.isError()) { Message.error(ex.getMessage()); } else { Message.verbose(ex.getMessage()); } } return null; } finally { IvyContext.popContext(); } } private void cacheModuleDescriptor(ModuleDescriptor systemMd, ModuleRevisionId systemMrid, ResolvedResource ivyRef, ResolvedModuleRevision rmr) { RepositoryCacheManager cacheManager = getRepositoryCacheManager(); final ModuleDescriptorParser parser = systemMd.getParser(); // the metadata artifact which was used to cache the original metadata file Artifact requestedMetadataArtifact = ivyRef == null ? 
systemMd.getMetadataArtifact() : parser.getMetadataArtifact( ModuleRevisionId.newInstance(systemMrid, ivyRef.getRevision()), ivyRef.getResource()); cacheManager.originalToCachedModuleDescriptor(this, ivyRef, requestedMetadataArtifact, rmr, new ModuleDescriptorWriter() { public void write(ResolvedResource originalMdResource, ModuleDescriptor md, File src, File dest) throws IOException, ParseException { if (originalMdResource == null) { // a basic ivy file is written containing default data XmlModuleDescriptorWriter.write(md, dest); } else { // copy and update ivy file from source to cache parser.toIvyFile( new FileInputStream(src), originalMdResource.getResource(), dest, md); long repLastModified = originalMdResource.getLastModified(); if (repLastModified > 0) { dest.setLastModified(repLastModified); } } } }); } private void checkNotConvertedExclusionRule(ModuleDescriptor systemMd, ResolvedResource ivyRef, ResolveData data) { if (!systemMd.isDefault() && data.getSettings().logNotConvertedExclusionRule() && systemMd instanceof DefaultModuleDescriptor) { DefaultModuleDescriptor dmd = (DefaultModuleDescriptor) systemMd; if (dmd.isNamespaceUseful()) { Message.warn( "the module descriptor " + ivyRef.getResource() + " has information which can't be converted into " + "the system namespace. " + "It will require the availability of the namespace '" + getNamespace().getName() + "' to be fully usable."); } } } private void resolveAndCheckPublicationDate(DependencyDescriptor systemDd, ModuleDescriptor systemMd, ModuleRevisionId systemMrid, ResolveData data) { // resolve and check publication date if (data.getDate() != null) { long pubDate = getPublicationDate(systemMd, systemDd, data); if (pubDate > data.getDate().getTime()) { throw new UnresolvedDependencyException( "\t" + getName() + ": unacceptable publication date => was=" + new Date(pubDate) + " required=" + data.getDate()); } else if (pubDate == -1) { throw new UnresolvedDependencyException("\t" + getName() + ": impossible to guess publication date: artifact missing for " + systemMrid); } systemMd.setResolvedPublicationDate(new Date(pubDate)); } } private void checkModuleDescriptorRevision(ModuleDescriptor systemMd, ModuleRevisionId systemMrid) { if (!getSettings().getVersionMatcher().accept(systemMrid, systemMd)) { throw new UnresolvedDependencyException( "\t" + getName() + ": unacceptable revision => was=" + systemMd.getModuleRevisionId().getRevision() + " required=" + systemMrid.getRevision()); } } private boolean getAndCheckIsDynamic(ModuleRevisionId systemMrid) { boolean isDynamic = getSettings().getVersionMatcher().isDynamic(systemMrid); if (isDynamic && !acceptLatest()) { throw new UnresolvedDependencyException( "dynamic revisions not handled by " + getClass().getName() + ". 
impossible to resolve " + systemMrid); } return isDynamic; } private void checkRevision(ModuleRevisionId systemMrid) { // check revision int index = systemMrid.getRevision().indexOf("@"); if (index != -1 && !systemMrid.getRevision().substring(index + 1).equals(workspaceName)) { throw new UnresolvedDependencyException("\t" + getName() + ": unhandled revision => " + systemMrid.getRevision()); } } private void resolveAndCheckRevision(ModuleDescriptor systemMd, ModuleRevisionId systemMrid, ResolvedResource ivyRef, boolean isDynamic) { ModuleRevisionId resolvedMrid = systemMrid; if (isDynamic) { resolvedMrid = systemMd.getResolvedModuleRevisionId(); if (resolvedMrid.getRevision() == null || resolvedMrid.getRevision().length() == 0) { if (ivyRef.getRevision() == null || ivyRef.getRevision().length() == 0) { resolvedMrid = ModuleRevisionId.newInstance(resolvedMrid, "working@" + getName()); } else { resolvedMrid = ModuleRevisionId.newInstance(resolvedMrid, ivyRef .getRevision()); } } Message.verbose("\t\t[" + toSystem(resolvedMrid).getRevision() + "] " + systemMrid.getModuleId()); } systemMd.setResolvedModuleRevisionId(resolvedMrid); checkModuleDescriptorRevision(systemMd, systemMrid); } private String getRevision(ResolvedResource ivyRef, ModuleRevisionId askedMrid, ModuleDescriptor md) throws ParseException { String revision = ivyRef.getRevision(); if (revision == null) { Message.debug("no revision found in reference for " + askedMrid); if (getSettings().getVersionMatcher().isDynamic(askedMrid)) { if (md.getModuleRevisionId().getRevision() == null) { return "working@" + getName(); } else { Message.debug("using " + askedMrid); revision = md.getModuleRevisionId().getRevision(); } } else { Message.debug("using " + askedMrid); revision = askedMrid.getRevision(); } } return revision; } public ResolvedModuleRevision parse(final ResolvedResource mdRef, DependencyDescriptor dd, ResolveData data) throws ParseException { DependencyDescriptor nsDd = dd; dd = toSystem(nsDd); ModuleRevisionId mrid = dd.getDependencyRevisionId(); ModuleDescriptorParser parser = ModuleDescriptorParserRegistry .getInstance().getParser(mdRef.getResource()); if (parser == null) { Message.warn("no module descriptor parser available for " + mdRef.getResource()); return null; } Message.verbose("\t" + getName() + ": found md file for " + mrid); Message.verbose("\t\t=> " + mdRef); Message.debug("\tparser = " + parser); ModuleRevisionId resolvedMrid = mrid; // first check if this dependency has not yet been resolved if (getSettings().getVersionMatcher().isDynamic(mrid)) { resolvedMrid = ModuleRevisionId.newInstance(mrid, mdRef.getRevision()); IvyNode node = data.getNode(resolvedMrid); if (node != null && node.getModuleRevision() != null) { // this revision has already be resolved : return it if (node.getDescriptor() != null && node.getDescriptor().isDefault()) { Message.verbose("\t" + getName() + ": found already resolved revision: " + resolvedMrid + ": but it's a default one, maybe we can find a better one"); } else { Message.verbose("\t" + getName() + ": revision already resolved: " + resolvedMrid); node.getModuleRevision().getReport().setSearched(true); return node.getModuleRevision(); } } } Artifact moduleArtifact = parser.getMetadataArtifact(resolvedMrid, mdRef.getResource()); return getRepositoryCacheManager().cacheModuleDescriptor( this, mdRef, dd, moduleArtifact, downloader, getCacheOptions(data)); } protected ResourceMDParser getRMDParser(final DependencyDescriptor dd, final ResolveData data) { return new ResourceMDParser() { 
public MDResolvedResource parse(Resource resource, String rev) { try { ResolvedModuleRevision rmr = BasicResolver.this.parse(new ResolvedResource( resource, rev), dd, data); if (rmr == null) { return null; } else { return new MDResolvedResource(resource, rev, rmr); } } catch (ParseException e) { Message.warn("Failed to parse the file '" + resource + "': " + e.getMessage()); return null; } } }; } protected ResourceMDParser getDefaultRMDParser(final ModuleId mid) { return new ResourceMDParser() { public MDResolvedResource parse(Resource resource, String rev) { DefaultModuleDescriptor md = DefaultModuleDescriptor.newDefaultInstance(new ModuleRevisionId(mid, rev)); MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(md.getMetadataArtifact()); madr.setDownloadStatus(DownloadStatus.NO); madr.setSearched(true); return new MDResolvedResource(resource, rev, new ResolvedModuleRevision( BasicResolver.this, BasicResolver.this, md, madr)); } }; } // private boolean isResolved(ResolveData data, ModuleRevisionId mrid) { // IvyNode node = getSystemNode(data, mrid); // return node != null && node.getModuleRevision() != null; // } // private void checkDescriptorConsistency(ModuleRevisionId mrid, ModuleDescriptor md, ResolvedResource ivyRef) throws ParseException { boolean ok = true; StringBuffer errors = new StringBuffer(); if (!mrid.getOrganisation().equals(md.getModuleRevisionId().getOrganisation())) { Message.error("\t" + getName() + ": bad organisation found in " + ivyRef.getResource() + ": expected='" + mrid.getOrganisation() + "' found='" + md.getModuleRevisionId().getOrganisation() + "'"); errors.append("bad organisation: expected='" + mrid.getOrganisation() + "' found='" + md.getModuleRevisionId().getOrganisation() + "'; "); ok = false; } if (!mrid.getName().equals(md.getModuleRevisionId().getName())) { Message.error("\t" + getName() + ": bad module name found in " + ivyRef.getResource() + ": expected='" + mrid.getName() + " found='" + md.getModuleRevisionId().getName() + "'"); errors.append("bad module name: expected='" + mrid.getName() + "' found='" + md.getModuleRevisionId().getName() + "'; "); ok = false; } if (ivyRef.getRevision() != null && !ivyRef.getRevision().startsWith("working@")) { ModuleRevisionId expectedMrid = ModuleRevisionId .newInstance(mrid, ivyRef.getRevision()); if (!getSettings().getVersionMatcher().accept(expectedMrid, md)) { Message.error("\t" + getName() + ": bad revision found in " + ivyRef.getResource() + ": expected='" + ivyRef.getRevision() + " found='" + md.getModuleRevisionId().getRevision() + "'"); errors.append("bad revision: expected='" + ivyRef.getRevision() + "' found='" + md.getModuleRevisionId().getRevision() + "'; "); ok = false; } } if (!getSettings().getStatusManager().isStatus(md.getStatus())) { Message.error("\t" + getName() + ": bad status found in " + ivyRef.getResource() + ": '" + md.getStatus() + "'"); errors.append("bad status: '" + md.getStatus() + "'; "); ok = false; } if (!ok) { throw new ParseException("inconsistent module descriptor file found in '" + ivyRef.getResource() + "': " + errors, 0); } } protected void clearIvyAttempts() { ivyattempts.clear(); clearArtifactAttempts(); } protected void logIvyAttempt(String attempt) { ivyattempts.add(attempt); Message.verbose("\t\ttried " + attempt); } protected void logArtifactAttempt(Artifact art, String attempt) { List attempts = (List) artattempts.get(art); if (attempts == null) { attempts = new ArrayList(); artattempts.put(art, attempts); } attempts.add(attempt); 
Message.verbose("\t\ttried " + attempt); } protected void logAttempt(String attempt) { Artifact currentArtifact = (Artifact) IvyContext.getContext().get(getName() + ".artifact"); if (currentArtifact != null) { logArtifactAttempt(currentArtifact, attempt); } else { logIvyAttempt(attempt); } } public void reportFailure() { Message.warn("==== " + getName() + ": tried"); for (ListIterator iter = ivyattempts.listIterator(); iter.hasNext();) { String m = (String) iter.next(); Message.warn(" " + m); } for (Iterator iter = artattempts.keySet().iterator(); iter.hasNext();) { Artifact art = (Artifact) iter.next(); List attempts = (List) artattempts.get(art); if (attempts != null) { Message.warn(" -- artifact " + art + ":"); for (ListIterator iterator = attempts.listIterator(); iterator.hasNext();) { String m = (String) iterator.next(); Message.warn(" " + m); } } } } public void reportFailure(Artifact art) { Message.warn("==== " + getName() + ": tried"); List attempts = (List) artattempts.get(art); if (attempts != null) { for (ListIterator iter = attempts.listIterator(); iter.hasNext();) { String m = (String) iter.next(); Message.warn(" " + m); } } } protected boolean acceptLatest() { return true; } public DownloadReport download(Artifact[] artifacts, DownloadOptions options) { RepositoryCacheManager cacheManager = getRepositoryCacheManager(); clearArtifactAttempts(); DownloadReport dr = new DownloadReport(); for (int i = 0; i < artifacts.length; i++) { ArtifactDownloadReport adr = cacheManager.download( artifacts[i], artifactResourceResolver, downloader, getCacheDownloadOptions(options)); if (DownloadStatus.FAILED == adr.getDownloadStatus()) { if (!ArtifactDownloadReport.MISSING_ARTIFACT.equals(adr.getDownloadDetails())) { Message.warn("\t" + adr); } } else if (DownloadStatus.NO == adr.getDownloadStatus()) { Message.verbose("\t" + adr); } else if (LogOptions.LOG_QUIET.equals(options.getLog())) { Message.verbose("\t" + adr); } else { Message.info("\t" + adr); } dr.addArtifactReport(adr); checkInterrupted(); } return dr; } protected void clearArtifactAttempts() { artattempts.clear(); } public boolean exists(Artifact artifact) { ResolvedResource artifactRef = getArtifactRef(artifact, null); if (artifactRef != null) { return artifactRef.getResource().exists(); } return false; } protected long getPublicationDate(ModuleDescriptor md, DependencyDescriptor dd, ResolveData data) { if (md.getPublicationDate() != null) { return md.getPublicationDate().getTime(); } ResolvedResource artifactRef = findFirstArtifactRef(md, dd, data); if (artifactRef != null) { return artifactRef.getLastModified(); } return -1; } public String toString() { return getName(); } public String[] listTokenValues(String token, Map otherTokenValues) { Collection ret = findNames(otherTokenValues, token); return (String[]) ret.toArray(new String[ret.size()]); } public OrganisationEntry[] listOrganisations() { Collection names = findNames(Collections.EMPTY_MAP, IvyPatternHelper.ORGANISATION_KEY); OrganisationEntry[] ret = new OrganisationEntry[names.size()]; int i = 0; for (Iterator iter = names.iterator(); iter.hasNext(); i++) { String org = (String) iter.next(); ret[i] = new OrganisationEntry(this, org); } return ret; } public ModuleEntry[] listModules(OrganisationEntry org) { Map tokenValues = new HashMap(); tokenValues.put(IvyPatternHelper.ORGANISATION_KEY, org.getOrganisation()); Collection names = findNames(tokenValues, IvyPatternHelper.MODULE_KEY); ModuleEntry[] ret = new ModuleEntry[names.size()]; int i = 0; for (Iterator iter = 
names.iterator(); iter.hasNext(); i++) { String name = (String) iter.next(); ret[i] = new ModuleEntry(org, name); } return ret; } public RevisionEntry[] listRevisions(ModuleEntry mod) { Map tokenValues = new HashMap(); tokenValues.put(IvyPatternHelper.ORGANISATION_KEY, mod.getOrganisation()); tokenValues.put(IvyPatternHelper.MODULE_KEY, mod.getModule()); Collection names = findNames(tokenValues, IvyPatternHelper.REVISION_KEY); RevisionEntry[] ret = new RevisionEntry[names.size()]; int i = 0; for (Iterator iter = names.iterator(); iter.hasNext(); i++) { String name = (String) iter.next(); ret[i] = new RevisionEntry(mod, name); } return ret; } protected abstract Collection findNames(Map tokenValues, String token); protected ResolvedResource findFirstArtifactRef(ModuleDescriptor md, DependencyDescriptor dd, ResolveData data) { ResolvedResource ret = null; String[] conf = md.getConfigurationsNames(); for (int i = 0; i < conf.length; i++) { Artifact[] artifacts = md.getArtifacts(conf[i]); for (int j = 0; j < artifacts.length; j++) { ret = getArtifactRef(artifacts[j], data.getDate()); if (ret != null) { return ret; } } } return null; } protected long getAndCheck(Resource resource, File dest) throws IOException { long size = get(resource, dest); String[] checksums = getChecksumAlgorithms(); boolean checked = false; for (int i = 0; i < checksums.length && !checked; i++) { checked = check(resource, dest, checksums[i]); } return size; } /** * Checks the given resource checksum if a checksum resource exists. * * @param resource * the resource to check * @param dest * the file where the resource has been downloaded * @param algorithm * the checksum algorithm to use * @return true if the checksum has been successfully checked, false if the checksum wasn't * available * @throws IOException * if a checksum exist but do not match the downloaded file checksum */ private boolean check(Resource resource, File dest, String algorithm) throws IOException { Resource csRes = resource.clone(resource.getName() + "." + algorithm); if (csRes.exists()) { Message.debug(algorithm + " file found for " + resource + ": checking..."); File csFile = File.createTempFile("ivytmp", algorithm); try { get(csRes, csFile); try { ChecksumHelper.check(dest, csFile, algorithm); Message.verbose(algorithm + " OK for " + resource); return true; } catch (IOException ex) { dest.delete(); throw ex; } } finally { csFile.delete(); } } else { return false; } } protected ResolvedResource getArtifactRef(Artifact artifact, Date date) { IvyContext.getContext().set(getName() + ".artifact", artifact); try { ResolvedResource ret = findArtifactRef(artifact, date); if (ret == null && artifact.getUrl() != null) { URL url = artifact.getUrl(); Message.verbose("\tusing url for " + artifact + ": " + url); logArtifactAttempt(artifact, url.toExternalForm()); ret = new ResolvedResource(new URLResource(url), artifact.getModuleRevisionId() .getRevision()); } return ret; } finally { IvyContext.getContext().set(getName() + ".artifact", null); } } protected abstract ResolvedResource findArtifactRef(Artifact artifact, Date date); protected abstract long get(Resource resource, File dest) throws IOException; public boolean isCheckconsistency() { return checkconsistency; } public void setCheckconsistency(boolean checkConsitency) { checkconsistency = checkConsitency; } public boolean isAllownomd() { return allownomd; } public void setAllownomd(boolean b) { allownomd = b; } public String[] getChecksumAlgorithms() { String csDef = checksums == null ? 
getSettings().getVariable("ivy.checksums") : checksums; if (csDef == null) { return new String[0]; } // csDef is a comma separated list of checksum algorithms to use with this resolver // we parse and return it as a String[] String[] checksums = csDef.split(","); List algos = new ArrayList(); for (int i = 0; i < checksums.length; i++) { String cs = checksums[i].trim(); if (!"".equals(cs) && !"none".equals(cs)) { algos.add(cs); } } return (String[]) algos.toArray(new String[algos.size()]); } public void setChecksums(String checksums) { this.checksums = checksums; } private final ArtifactResourceResolver artifactResourceResolver = new ArtifactResourceResolver() { public ResolvedResource resolve(Artifact artifact) { artifact = fromSystem(artifact); return getArtifactRef(artifact, null); } }; private final ResourceDownloader downloader = new ResourceDownloader() { public void download(Artifact artifact, Resource resource, File dest) throws IOException { if (dest.exists()) { dest.delete(); } File part = new File(dest.getAbsolutePath() + ".part"); if (resource.getName().equals( String.valueOf(artifact.getUrl()))) { if (part.getParentFile() != null) { part.getParentFile().mkdirs(); } extartifactrep.get(resource.getName(), part); } else { getAndCheck(resource, part); } if (!part.renameTo(dest)) { throw new IOException( "impossible to move part file to definitive one: " + part + " -> " + dest); } } }; }
Attempt to fix IVY-773 (Extra Attributes are not available to resolver after resolve if cache was empty) git-svn-id: bddd0b838a5b7898c5897d85c562f956f46262e5@640519 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/ivy/plugins/resolver/BasicResolver.java
Attempt to fix IVY-773 (Extra Attributes are not available to resolver after resolve if cache was empty)
Java
apache-2.0
e50dd0277f13ad39827e9496934ff30585516b63
0
michalkurka/h2o-3,nilbody/h2o-3,tarasane/h2o-3,brightchen/h2o-3,ChristosChristofidis/h2o-3,mathemage/h2o-3,datachand/h2o-3,YzPaul3/h2o-3,bospetersen/h2o-3,bikash/h2o-dev,weaver-viii/h2o-3,madmax983/h2o-3,mrgloom/h2o-3,kyoren/https-github.com-h2oai-h2o-3,PawarPawan/h2o-v3,spennihana/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,h2oai/h2o-flow,printedheart/h2o-3,printedheart/h2o-flow,junwucs/h2o-3,junwucs/h2o-flow,pchmieli/h2o-3,nilbody/h2o-flow
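For reference on the BasicResolver record above: getAndCheck(...) downloads a resource and check(...) then verifies it against a sidecar "<name>.<algorithm>" checksum resource via ChecksumHelper. The following is a minimal, self-contained sketch of that same sidecar-checksum idea using only JDK classes; the class name and file names are illustrative and not part of Ivy.

// Standalone sketch of the sidecar-checksum pattern used by BasicResolver.check(...):
// a downloaded file is verified against a small "<name>.<algorithm>" text resource.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChecksumCheckSketch {

    // Returns true if the hex digest of dest matches the text stored in checksumFile.
    static boolean matches(Path dest, Path checksumFile, String algorithm)
            throws IOException, NoSuchAlgorithmException {
        byte[] digest = MessageDigest.getInstance(algorithm).digest(Files.readAllBytes(dest));
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b & 0xff));
        }
        // Checksum files usually contain "<hex>" or "<hex>  <filename>"; keep the first token.
        String expected = new String(Files.readAllBytes(checksumFile), StandardCharsets.UTF_8)
                .trim().split("\\s+")[0].toLowerCase();
        return expected.equals(hex.toString());
    }

    public static void main(String[] args) throws Exception {
        // Hypothetical file names; in Ivy the checksum resource sits next to the artifact.
        Path artifact = Paths.get("lib.jar");
        Path sidecar = Paths.get("lib.jar.sha1");
        System.out.println(matches(artifact, sidecar, "SHA-1") ? "checksum OK" : "checksum MISMATCH");
    }
}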
package water; import java.util.Arrays; import water.util.Log; /** Lockable Keys - locked during long running jobs, to prevent overwriting * in-use keys. e.g. model-building: expected to read-lock input ValueArray * and Frames, and write-lock the output Model. Parser should write-lock the * output VA/Frame, to guard against double-parsing. * * Supports: * lock-and-delete-old-and-update (for new Keys) * lock-and-delete (for removing old Keys) * unlock * * @author <a href="mailto:[email protected]"></a> * @version 1.0 */ public abstract class Lockable<T extends Lockable<T>> extends Keyed { /** Write-locker job is in _jobs[0 ]. Can be null locker. * Read -locker jobs are in _jobs[1+]. * Unlocked has _jobs equal to null. * Only 1 situation will be true at a time; atomically updated. * Transient, because this data is only valid on the master node. */ //@API(help="Jobs locking this key") public transient Key _lockers[]; public Lockable( Key key ) { super(key); } // ----------- // Atomic create+overwrite of prior key. // If prior key exists, block until acquire a write-lock. // Then call remove, removing all of a prior key. // The replace this object as the new Lockable, still write-locked. // "locker" can be null, meaning the special no-Job locker; for use by expected-fast operations // // Example: write-lock & remove an old Frame, and replace with a new locked Frame // Local-Node Master-Node // (1) new,old -->write_lock(job)--> old // (2) new,old.waiting... new,old+job-locked atomic xtn loop // (3) old.remove onSuccess // (4) new <--update success <-- new+job-locked // Write-lock 'this', returns OLD guy public Lockable write_lock( Key job_key ) { Log.debug("write-lock "+_key+" by job "+job_key); return ((PriorWriteLock)new PriorWriteLock(job_key).invoke(_key))._old; } // Write-lock 'this', delete any old thing, returns NEW guy public T delete_and_lock( Key job_key ) { Lockable old = write_lock(job_key); if( old != null ) { Log.debug("lock-then-clear "+_key+" by job "+job_key); old.remove(new Futures()).blockForPending(); } return (T)this; } // Will fail if locked by anybody. public static void delete( Key key ) { Value val = DKV.get(key); if( val==null ) return; ((Lockable)val.get()).delete(); } public void delete( ) { delete(null,0.0f); } // Will fail if locked by anybody other than 'job_key' public void delete( Key job_key, float dummy ) { if( _key != null ) { Log.debug("lock-then-delete "+_key+" by job "+job_key); new PriorWriteLock(job_key).invoke(_key); } remove(new Futures()).blockForPending(); } // Obtain the write-lock on _key, which may already exist, using the current 'this'. private class PriorWriteLock extends TAtomic<Lockable> { private final Key _job_key; // Job doing the locking private Lockable _old; // Return the old thing, for deleting later private PriorWriteLock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { _old = old; if( old != null ) { // Prior Lockable exists? assert !old.is_wlocked(_job_key) : "Key "+_key+" already locked; lks="+Arrays.toString(old._lockers); // No double locking by same job if( old.is_locked(_job_key) ) // read-locked by self? (double-write-lock checked above) old.set_unlocked(old._lockers,_job_key); // Remove read-lock; will atomically upgrade to write-lock if( !old.is_unlocked() ) // Blocking for some other Job to finish??? throw new IllegalArgumentException(old.errStr()+" "+_key+" is already in use. Unable to use it now. 
Consider using a different destination name."); } // Update & set the new value set_write_lock(_job_key); return Lockable.this; } } // ----------- // Atomically get a read-lock, preventing future deletes or updates public static void read_lock( Key k, Key job_key ) { Value val = DKV.get(k); if( val.isLockable() ) ((Lockable)val.get()).read_lock(job_key); // Lockable being locked } public void read_lock( Key job_key ) { if( _key != null ) { Log.debug("shared-read-lock "+_key+" by job "+job_key); new ReadLock(job_key).invoke(_key); } } // Obtain read-lock static private class ReadLock extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking ReadLock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { if( old == null ) throw new IllegalArgumentException("Nothing to lock!"); if( old.is_wlocked() ) throw new IllegalArgumentException( old.errStr()+" "+_key+" is being created; Unable to read it now."); old.set_read_lock(_job_key); return old; } } // ----------- // Atomically set a new version of self public void update( Key job_key ) { Log.debug("update write-locked "+_key+" by job "+job_key); new Update(job_key).invoke(_key); } // Freshen 'this' and leave locked private class Update extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking Update( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { assert old != null : "Cannot update - Lockable is null!"; assert old.is_wlocked() : "Cannot update - Lockable is not write-locked!"; _lockers = old._lockers; // Keep lock state return Lockable.this; // Freshen this } } // ----------- // Atomically set a new version of self & unlock. public void unlock( Key job_key ) { if( _key != null ) { Log.debug("unlock "+_key+" by job "+job_key); new Unlock(job_key).invoke(_key); } } // Freshen 'this' and unlock private class Unlock extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking Unlock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { assert old != null : "Trying to unlock null!"; assert old.is_locked(_job_key) : "Can't unlock: Not locked!"; set_unlocked(old._lockers,_job_key); return Lockable.this; } } // ----------- // Accessers for locking state. Minimal self-checking; primitive results. private boolean is_locked(Key job_key) { if( _lockers==null ) return false; for( int i=(_lockers.length==1?0:1); i<_lockers.length; i++ ) { Key k = _lockers[i]; if( job_key==k || (job_key != null && k != null && job_key.equals(k)) ) return true; } return false; } private boolean is_wlocked() { return _lockers!=null && _lockers.length==1; } private boolean is_wlocked(Key job_key) { return is_wlocked() && (_lockers[0] == job_key || _lockers[0] != null && _lockers[0].equals(job_key)); } private boolean is_unlocked() { return _lockers== null; } private void set_write_lock( Key job_key ) { _lockers=new Key[]{job_key}; assert is_locked(job_key); } private void set_read_lock(Key job_key) { assert !is_locked(job_key); // no double locking assert !is_wlocked(); // not write locked _lockers = _lockers == null ? new Key[2] : Arrays.copyOf(_lockers,_lockers.length+1); _lockers[_lockers.length-1] = job_key; assert is_locked(job_key); } private void set_unlocked(Key lks[], Key job_key) { if( lks.length==1 ) { // Is write-locked? 
assert job_key==lks[0] || job_key.equals(lks[0]); _lockers = null; // Then unlocked } else if( lks.length==2 ) { // One reader assert lks[0]==null; // Not write-locked assert lks[1]==job_key || (job_key != null && job_key.equals(lks[1])); _lockers = null; // So unlocked } else { // Else one of many readers assert lks.length>2; _lockers = Arrays.copyOf(lks,lks.length-1); int j=1; // Skip the initial null slot for( int i=1; i<lks.length; i++ ) if(job_key != null && !job_key.equals(lks[i]) || (job_key == null && lks[i] != null)){ _lockers[j++] = lks[i]; } assert j==lks.length-1; // Was locked exactly once } assert !is_locked(job_key); } // Pretty string when locking fails protected abstract String errStr(); }
h2o-core/src/main/water/Lockable.java
package water; import java.util.Arrays; import water.util.Log; /** Lockable Keys - locked during long running jobs, to prevent overwriting * in-use keys. e.g. model-building: expected to read-lock input ValueArray * and Frames, and write-lock the output Model. Parser should write-lock the * output VA/Frame, to guard against double-parsing. * * Supports: * lock-and-delete-old-and-update (for new Keys) * lock-and-delete (for removing old Keys) * unlock * * @author <a href="mailto:[email protected]"></a> * @version 1.0 */ public abstract class Lockable<T extends Lockable<T>> extends Keyed { /** Write-locker job is in _jobs[0 ]. Can be null locker. * Read -locker jobs are in _jobs[1+]. * Unlocked has _jobs equal to null. * Only 1 situation will be true at a time; atomically updated. * Transient, because this data is only valid on the master node. */ //@API(help="Jobs locking this key") public transient Key _lockers[]; public Lockable( Key key ) { super(key); } // ----------- // Atomic create+overwrite of prior key. // If prior key exists, block until acquire a write-lock. // Then call remove, removing all of a prior key. // The replace this object as the new Lockable, still write-locked. // "locker" can be null, meaning the special no-Job locker; for use by expected-fast operations // // Example: write-lock & remove an old Frame, and replace with a new locked Frame // Local-Node Master-Node // (1) new,old -->write_lock(job)--> old // (2) new,old.waiting... new,old+job-locked atomic xtn loop // (3) old.remove onSuccess // (4) new <--update success <-- new+job-locked // Write-lock 'this', returns OLD guy public Lockable write_lock( Key job_key ) { Log.debug("write-lock "+_key+" by job "+job_key); return ((PriorWriteLock)new PriorWriteLock(job_key).invoke(_key))._old; } // Write-lock 'this', delete any old thing, returns NEW guy public T delete_and_lock( Key job_key ) { Lockable old = write_lock(job_key); if( old != null ) { Log.debug("lock-then-clear "+_key+" by job "+job_key); old.remove(new Futures()).blockForPending(); } return (T)this; } // Will fail if locked by anybody. public static void delete( Key key ) { Value val = DKV.get(key); if( val==null ) return; ((Lockable)val.get()).delete(); } public void delete( ) { delete(null,0.0f); } // Will fail if locked by anybody other than 'job_key' public void delete( Key job_key, float dummy ) { if( _key != null ) { Log.debug("lock-then-delete "+_key+" by job "+job_key); new PriorWriteLock(job_key).invoke(_key); } remove(new Futures()).blockForPending(); } // Obtain the write-lock on _key, which may already exist, using the current 'this'. private class PriorWriteLock extends TAtomic<Lockable> { private final Key _job_key; // Job doing the locking private Lockable _old; // Return the old thing, for deleting later private PriorWriteLock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { _old = old; if( old != null ) { // Prior Lockable exists? assert !old.is_wlocked(_job_key) : "Key "+_key+" already locked; lks="+Arrays.toString(old._lockers); // No double locking by same job if( old.is_locked(_job_key) ) // read-locked by self? (double-write-lock checked above) old.set_unlocked(old._lockers,_job_key); // Remove read-lock; will atomically upgrade to write-lock if( !old.is_unlocked() ) // Blocking for some other Job to finish??? throw new IllegalArgumentException(old.errStr()+" "+_key+" is already in use. Unable to use it now. 
Consider using a different destination name."); } // Update & set the new value set_write_lock(_job_key); return Lockable.this; } } // ----------- // Atomically get a read-lock, preventing future deletes or updates public static void read_lock( Key k, Key job_key ) { Value val = DKV.get(k); if( val.isLockable() ) ((Lockable)val.get()).read_lock(job_key); // Lockable being locked } public void read_lock( Key job_key ) { if( _key != null ) { Log.debug("shared-read-lock "+_key+" by job "+job_key); new ReadLock(job_key).invoke(_key); } } // Obtain read-lock static private class ReadLock extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking ReadLock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { if( old == null ) throw new IllegalArgumentException("Nothing to lock!"); if( old.is_wlocked() ) throw new IllegalArgumentException( old.errStr()+" "+_key+" is being created; Unable to read it now."); old.set_read_lock(_job_key); return old; } } // ----------- // Atomically set a new version of self public void update( Key job_key ) { Log.debug("update write-locked "+_key+" by job "+job_key); new Update(job_key).invoke(_key); } // Freshen 'this' and leave locked private class Update extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking Update( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { assert old != null && old.is_wlocked() : "Cannot update - Lockable is null or already locked!"; _lockers = old._lockers; // Keep lock state return Lockable.this; // Freshen this } } // ----------- // Atomically set a new version of self & unlock. public void unlock( Key job_key ) { if( _key != null ) { Log.debug("unlock "+_key+" by job "+job_key); new Unlock(job_key).invoke(_key); } } // Freshen 'this' and unlock private class Unlock extends TAtomic<Lockable> { final Key _job_key; // Job doing the unlocking Unlock( Key job_key ) { _job_key = job_key; } @Override public Lockable atomic(Lockable old) { assert old.is_locked(_job_key); set_unlocked(old._lockers,_job_key); return Lockable.this; } } // ----------- // Accessers for locking state. Minimal self-checking; primitive results. private boolean is_locked(Key job_key) { if( _lockers==null ) return false; for( int i=(_lockers.length==1?0:1); i<_lockers.length; i++ ) { Key k = _lockers[i]; if( job_key==k || (job_key != null && k != null && job_key.equals(k)) ) return true; } return false; } private boolean is_wlocked() { return _lockers!=null && _lockers.length==1; } private boolean is_wlocked(Key job_key) { return is_wlocked() && (_lockers[0] == job_key || _lockers[0] != null && _lockers[0].equals(job_key)); } private boolean is_unlocked() { return _lockers== null; } private void set_write_lock( Key job_key ) { _lockers=new Key[]{job_key}; assert is_locked(job_key); } private void set_read_lock(Key job_key) { assert !is_locked(job_key); // no double locking assert !is_wlocked(); // not write locked _lockers = _lockers == null ? new Key[2] : Arrays.copyOf(_lockers,_lockers.length+1); _lockers[_lockers.length-1] = job_key; assert is_locked(job_key); } private void set_unlocked(Key lks[], Key job_key) { if( lks.length==1 ) { // Is write-locked? 
assert job_key==lks[0] || job_key.equals(lks[0]); _lockers = null; // Then unlocked } else if( lks.length==2 ) { // One reader assert lks[0]==null; // Not write-locked assert lks[1]==job_key || (job_key != null && job_key.equals(lks[1])); _lockers = null; // So unlocked } else { // Else one of many readers assert lks.length>2; _lockers = Arrays.copyOf(lks,lks.length-1); int j=1; // Skip the initial null slot for( int i=1; i<lks.length; i++ ) if(job_key != null && !job_key.equals(lks[i]) || (job_key == null && lks[i] != null)){ _lockers[j++] = lks[i]; } assert j==lks.length-1; // Was locked exactly once } assert !is_locked(job_key); } // Pretty string when locking fails protected abstract String errStr(); }
Add assertion messages for locking.
h2o-core/src/main/water/Lockable.java
Add assertion messages for locking.
Java
apache-2.0
a7f1ae6ffc330a101f9d7f64fd6cceaf5df90380
0
osmdroid/osmdroid
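The diff in the Lockable record above replaces combined assertions in the Update and Unlock transactions with separate "assert <condition> : <message>" statements, so a failure reports which precondition was violated. Below is a minimal stand-alone sketch of that assertion form; the class and field names are illustrative, not taken from h2o, and assertions only fire when the JVM is started with -ea.

// Minimal illustration of the "assert <condition> : <message>" form used in the
// Lockable change above. Run with `java -ea AssertDemo`; assertions are disabled
// by default and only fire when the JVM is started with -ea.
public class AssertDemo {

    private Object[] lockers;   // illustrative stand-in for a lock-state field

    void update(Object old) {
        // Two separate assertions give two distinct failure messages, instead of one
        // combined check whose message cannot say which part actually failed.
        assert old != null : "Cannot update - target is null!";
        assert lockers != null && lockers.length == 1 : "Cannot update - not write-locked!";
        // ... freshen state here ...
    }

    public static void main(String[] args) {
        new AssertDemo().update(null);  // with -ea: AssertionError carrying the first message
    }
}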
package org.osmdroid.bonuspack.kml; import java.io.Writer; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Set; import org.osmdroid.bonuspack.overlays.Marker; import org.osmdroid.bonuspack.overlays.Polygon; import org.osmdroid.bonuspack.overlays.Polyline; import org.osmdroid.util.BoundingBoxE6; import org.osmdroid.util.GeoPoint; import org.osmdroid.views.MapView; import org.osmdroid.views.overlay.Overlay; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import android.os.Parcel; import android.os.Parcelable; /** * KML Placemark. Support the following Geometry: Point, LineString, and Polygon. * @author M.Kergall */ public class KmlPlacemark extends KmlFeature implements Cloneable, Parcelable { /** the KML Geometry of the Placemark. Null if none. */ public KmlGeometry mGeometry; /** constructs a Placemark of unknown Geometry */ public KmlPlacemark(){ super(); } /** * constructs a Placemark with a Point Geometry. * @param position position of the Point */ public KmlPlacemark(GeoPoint position){ this(); mGeometry = new KmlPoint(position); } /** constructs a Placemark from a Marker, as a KML Point */ public KmlPlacemark(Marker marker){ this(marker.getPosition()); mName = marker.getTitle(); mDescription = marker.getSnippet(); mVisibility = marker.isEnabled(); //TODO: Style / IconStyle => transparency, hotspot, bearing. } /** constructs a Placemark from a Polygon overlay, as a KML Polygon */ public KmlPlacemark(Polygon polygon, KmlDocument kmlDoc){ this(); mName = polygon.getTitle(); mDescription = polygon.getSnippet(); mGeometry = new KmlPolygon(); mGeometry.mCoordinates = (ArrayList<GeoPoint>)polygon.getPoints(); ((KmlPolygon)mGeometry).mHoles = (ArrayList<ArrayList<GeoPoint>>)polygon.getHoles(); mVisibility = polygon.isEnabled(); //Style: Style style = new Style(); style.mPolyStyle = new ColorStyle(polygon.getFillColor()); style.mLineStyle = new LineStyle(polygon.getStrokeColor(), polygon.getStrokeWidth()); mStyle = kmlDoc.addStyle(style); } /** constructs a Placemark from a Polyline overlay, as a KML LineString */ public KmlPlacemark(Polyline polyline, KmlDocument kmlDoc){ this(); mName = polyline.getTitle(); mDescription = polyline.getSnippet(); mGeometry = new KmlLineString(); mGeometry.mCoordinates = (ArrayList<GeoPoint>)polyline.getPoints(); mVisibility = polyline.isEnabled(); //Style: Style style = new Style(); style.mLineStyle = new LineStyle(polyline.getColor(), polyline.getWidth()); mStyle = kmlDoc.addStyle(style); } /** GeoJSON constructor */ public KmlPlacemark(JsonObject json){ this(); if (json.has("id")) mId = json.get("id").getAsString(); JsonObject geometry = json.getAsJsonObject("geometry"); if (geometry != null) { mGeometry = KmlGeometry.parseGeoJSON(geometry); } if (json.has("properties")){ //Parse properties: JsonObject properties = json.getAsJsonObject("properties"); Set<Map.Entry<String,JsonElement>> entrySet = properties.entrySet(); for (Map.Entry<String,JsonElement> entry:entrySet){ String key = entry.getKey(); JsonElement je = entry.getValue(); String value; try { value = je.getAsString(); } catch (Exception e){ value = je.toString(); } if (key!=null && value!=null) setExtendedData(key, value); } //Put "name" property in standard KML format: if (mExtendedData!=null && mExtendedData.containsKey("name")){ mName = mExtendedData.get("name"); mExtendedData.remove("name"); } } } @Override public BoundingBoxE6 getBoundingBox(){ if (mGeometry != null) return mGeometry.getBoundingBox(); else return null; } 
@Override public Overlay buildOverlay(MapView map, Style defaultStyle, Styler styler, KmlDocument kmlDocument){ if (mGeometry != null) return mGeometry.buildOverlay(map, defaultStyle, styler, this, kmlDocument); else return null; } @Override public void writeKMLSpecifics(Writer writer){ if (mGeometry != null) mGeometry.saveAsKML(writer); } protected JsonObject geoJSONProperties(){ try { JsonObject json = new JsonObject(); if (mName != null){ json.addProperty("name", mName); } if (mExtendedData != null){ for (HashMap.Entry<String, String> entry : mExtendedData.entrySet()) { String name = entry.getKey(); String value = entry.getValue(); json.addProperty(name, value); } } return json; } catch (Exception e) { e.printStackTrace(); return null; } } /** @return this as a GeoJSON object. */ @Override public JsonObject asGeoJSON(boolean isRoot){ JsonObject json = new JsonObject(); json.addProperty("type", "Feature"); if (mId != null) json.addProperty("id", mId); json.add("geometry", mGeometry.asGeoJSON()); json.add("properties", geoJSONProperties()); return json; } //Cloneable implementation ------------------------------------ @Override public KmlPlacemark clone(){ KmlPlacemark kmlPlacemark = (KmlPlacemark)super.clone(); if (mGeometry != null) kmlPlacemark.mGeometry = mGeometry.clone(); return kmlPlacemark; } //Parcelable implementation ------------ @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeParcelable(mGeometry, flags); } public static final Parcelable.Creator<KmlPlacemark> CREATOR = new Parcelable.Creator<KmlPlacemark>() { @Override public KmlPlacemark createFromParcel(Parcel source) { return new KmlPlacemark(source); } @Override public KmlPlacemark[] newArray(int size) { return new KmlPlacemark[size]; } }; public KmlPlacemark(Parcel in){ super(in); mGeometry = in.readParcelable(KmlGeometry.class.getClassLoader()); } }
osmdroid-android/src/org/osmdroid/bonuspack/kml/KmlPlacemark.java
package org.osmdroid.bonuspack.kml; import java.io.Writer; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.Set; import org.osmdroid.bonuspack.overlays.Marker; import org.osmdroid.bonuspack.overlays.Polygon; import org.osmdroid.bonuspack.overlays.Polyline; import org.osmdroid.util.BoundingBoxE6; import org.osmdroid.util.GeoPoint; import org.osmdroid.views.MapView; import org.osmdroid.views.overlay.Overlay; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import android.os.Parcel; import android.os.Parcelable; /** * KML Placemark. Support the following Geometry: Point, LineString, and Polygon. * @author M.Kergall */ public class KmlPlacemark extends KmlFeature implements Cloneable, Parcelable { /** the KML Geometry of the Placemark. Null if none. */ public KmlGeometry mGeometry; /** constructs a Placemark of unknown Geometry */ public KmlPlacemark(){ super(); } /** * constructs a Placemark with a Point Geometry. * @param position position of the Point */ public KmlPlacemark(GeoPoint position){ this(); mGeometry = new KmlPoint(position); } /** constructs a Placemark from a Marker, as a KML Point */ public KmlPlacemark(Marker marker){ this(marker.getPosition()); mName = marker.getTitle(); mDescription = marker.getSnippet(); mVisibility = marker.isEnabled(); //TODO: Style / IconStyle => transparency, hotspot, bearing. } /** constructs a Placemark from a Polygon overlay, as a KML Polygon */ public KmlPlacemark(Polygon polygon, KmlDocument kmlDoc){ this(); mName = polygon.getTitle(); mDescription = polygon.getSnippet(); mGeometry = new KmlPolygon(); mGeometry.mCoordinates = (ArrayList<GeoPoint>)polygon.getPoints(); ((KmlPolygon)mGeometry).mHoles = (ArrayList<ArrayList<GeoPoint>>)polygon.getHoles(); mVisibility = polygon.isEnabled(); //Style: Style style = new Style(); style.mPolyStyle = new ColorStyle(polygon.getFillColor()); style.mLineStyle = new LineStyle(polygon.getStrokeColor(), polygon.getStrokeWidth()); mStyle = kmlDoc.addStyle(style); } /** constructs a Placemark from a Polyline overlay, as a KML LineString */ public KmlPlacemark(Polyline polyline, KmlDocument kmlDoc){ this(); mName = polyline.getTitle(); mDescription = polyline.getSnippet(); mGeometry = new KmlLineString(); mGeometry.mCoordinates = (ArrayList<GeoPoint>)polyline.getPoints(); mVisibility = polyline.isEnabled(); //Style: Style style = new Style(); style.mLineStyle = new LineStyle(polyline.getColor(), polyline.getWidth()); mStyle = kmlDoc.addStyle(style); } /** GeoJSON constructor */ public KmlPlacemark(JsonObject json){ this(); if (json.has("id")) mId = json.get("id").getAsString(); JsonObject geometry = json.getAsJsonObject("geometry"); if (geometry != null) { mGeometry = KmlGeometry.parseGeoJSON(geometry); } if (json.has("properties")){ //Parse properties: JsonObject properties = json.getAsJsonObject("properties"); Set<Map.Entry<String,JsonElement>> entrySet = properties.entrySet(); for (Map.Entry<String,JsonElement> entry:entrySet){ String key = entry.getKey(); String value = entry.getValue().getAsString(); if (key!=null && value!=null) setExtendedData(key, value); } //Put "name" property in standard KML format: if (mExtendedData!=null && mExtendedData.containsKey("name")){ mName = mExtendedData.get("name"); mExtendedData.remove("name"); } } } @Override public BoundingBoxE6 getBoundingBox(){ if (mGeometry != null) return mGeometry.getBoundingBox(); else return null; } @Override public Overlay buildOverlay(MapView map, Style defaultStyle, Styler styler, 
KmlDocument kmlDocument){ if (mGeometry != null) return mGeometry.buildOverlay(map, defaultStyle, styler, this, kmlDocument); else return null; } @Override public void writeKMLSpecifics(Writer writer){ if (mGeometry != null) mGeometry.saveAsKML(writer); } protected JsonObject geoJSONProperties(){ try { JsonObject json = new JsonObject(); if (mName != null){ json.addProperty("name", mName); } if (mExtendedData != null){ for (HashMap.Entry<String, String> entry : mExtendedData.entrySet()) { String name = entry.getKey(); String value = entry.getValue(); json.addProperty(name, value); } } return json; } catch (Exception e) { e.printStackTrace(); return null; } } /** @return this as a GeoJSON object. */ @Override public JsonObject asGeoJSON(boolean isRoot){ JsonObject json = new JsonObject(); json.addProperty("type", "Feature"); if (mId != null) json.addProperty("id", mId); json.add("geometry", mGeometry.asGeoJSON()); json.add("properties", geoJSONProperties()); return json; } //Cloneable implementation ------------------------------------ @Override public KmlPlacemark clone(){ KmlPlacemark kmlPlacemark = (KmlPlacemark)super.clone(); if (mGeometry != null) kmlPlacemark.mGeometry = mGeometry.clone(); return kmlPlacemark; } //Parcelable implementation ------------ @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeParcelable(mGeometry, flags); } public static final Parcelable.Creator<KmlPlacemark> CREATOR = new Parcelable.Creator<KmlPlacemark>() { @Override public KmlPlacemark createFromParcel(Parcel source) { return new KmlPlacemark(source); } @Override public KmlPlacemark[] newArray(int size) { return new KmlPlacemark[size]; } }; public KmlPlacemark(Parcel in){ super(in); mGeometry = in.readParcelable(KmlGeometry.class.getClassLoader()); } }
Solving issue #111: support GeoJSON nested properties.
osmdroid-android/src/org/osmdroid/bonuspack/kml/KmlPlacemark.java
Solving issue #111: support GeoJSON nested properties.
Java
apache-2.0
1b75b134b640f096c783bec40e9fc13407f1ebcf
0
kroepke/luna,mjanicek/rembulan
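The fix in the KmlPlacemark record above replaces a bare entry.getValue().getAsString() (which fails for nested GeoJSON objects and arrays) with a fallback to JsonElement.toString(). A small self-contained Gson sketch of that flattening step follows; the class name and sample JSON are illustrative, not taken from osmdroid.

// Flattening GeoJSON "properties" to string values, as in the KmlPlacemark fix above:
// primitives keep their string form, nested objects/arrays fall back to raw JSON text.
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.HashMap;
import java.util.Map;

public class PropertyFlattenDemo {
    public static void main(String[] args) {
        JsonObject properties = new JsonParser()
                .parse("{\"name\":\"A\",\"style\":{\"color\":\"red\"},\"ids\":[1,2]}")
                .getAsJsonObject();

        Map<String, String> extendedData = new HashMap<>();
        for (Map.Entry<String, JsonElement> entry : properties.entrySet()) {
            JsonElement je = entry.getValue();
            String value;
            try {
                value = je.getAsString();   // works for primitives, e.g. "A"
            } catch (Exception e) {
                value = je.toString();      // nested object/array -> raw JSON text
            }
            extendedData.put(entry.getKey(), value);
        }
        // e.g. {name=A, style={"color":"red"}, ids=[1,2]} (map iteration order may vary)
        System.out.println(extendedData);
    }
}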
/* * Copyright 2016 Miroslav Janíček * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sandius.rembulan.core; import net.sandius.rembulan.LuaType; public abstract class Values { private Values() { // not to be instantiated or extended } /* * Mappings between types: * * Lua | Java (Rembulan) * ----------+--------------------- * nil | null pointer * boolean | java.lang.Boolean * number | java.lang.Number; * | floats: java.lang.Double (canonical), java.lang.Float * | integers: any other subclass of Number, java.lang.Long being * | the canonical representation * string | java.lang.String * table | net.sandius.rembulan.core.Table * function | net.sandius.rembulan.core.Function * userdata | full userdata: net.sandius.rembulan.core.Userdata * | light userdata: any class other than those mentioned here */ public static LuaType typeOf(Object v) { if (v == null) return LuaType.NIL; else if (v instanceof Boolean) return LuaType.BOOLEAN; else if (v instanceof Number) return LuaType.NUMBER; else if (v instanceof String) return LuaType.STRING; else if (v instanceof Table) return LuaType.TABLE; else if (v instanceof Invokable) return LuaType.FUNCTION; else if (v instanceof Coroutine) return LuaType.THREAD; else return LuaType.USERDATA; } public static boolean isNaN(Object o) { return (o instanceof Double || o instanceof Float) && Double.isNaN(((Number) o).doubleValue()); } public static boolean isLightUserdata(Object o) { return typeOf(o) == LuaType.USERDATA && !(o instanceof Userdata); } }
rembulan-core/src/main/java/net/sandius/rembulan/core/Values.java
/* * Copyright 2016 Miroslav Janíček * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sandius.rembulan.core; import net.sandius.rembulan.LuaFormat; import net.sandius.rembulan.LuaType; public abstract class Values { private Values() { // not to be instantiated or extended } /* * Mappings between types: * * Lua | Java (Rembulan) * ----------+--------------------- * nil | null pointer * boolean | java.lang.Boolean * number | java.lang.Number; * | floats: java.lang.Double (canonical), java.lang.Float * | integers: any other subclass of Number, java.lang.Long being * | the canonical representation * string | java.lang.String * table | net.sandius.rembulan.core.Table * function | net.sandius.rembulan.core.Function * userdata | full userdata: net.sandius.rembulan.core.Userdata * | light userdata: any class other than those mentioned here */ public static LuaType typeOf(Object v) { if (v == null) return LuaType.NIL; else if (v instanceof Boolean) return LuaType.BOOLEAN; else if (v instanceof Number) return LuaType.NUMBER; else if (v instanceof String) return LuaType.STRING; else if (v instanceof Table) return LuaType.TABLE; else if (v instanceof Invokable) return LuaType.FUNCTION; else if (v instanceof Coroutine) return LuaType.THREAD; else return LuaType.USERDATA; } public static boolean isNaN(Object o) { return (o instanceof Double || o instanceof Float) && Double.isNaN(((Number) o).doubleValue()); } public static boolean isLightUserdata(Object o) { return typeOf(o) == LuaType.USERDATA && !(o instanceof Userdata); } }
Removing an unused import.
rembulan-core/src/main/java/net/sandius/rembulan/core/Values.java
Removing an unused import.
Java
apache-2.0
0d00014061f1516ef534ac6626e27d618ea05c5d
0
jaamsim/jaamsim
/* * JaamSim Discrete Event Simulation * Copyright (C) 2011 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.jaamsim.ui; import java.awt.Component; import java.awt.Cursor; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.dnd.DnDConstants; import java.awt.dnd.DragGestureEvent; import java.awt.dnd.DragGestureListener; import java.awt.dnd.DragSource; import java.awt.event.MouseEvent; import java.awt.image.BufferedImage; import java.util.ArrayList; import java.util.HashMap; import javax.swing.ImageIcon; import javax.swing.JFrame; import javax.swing.JScrollPane; import javax.swing.JTree; import javax.swing.ToolTipManager; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeCellRenderer; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; import com.jaamsim.DisplayModels.DisplayModel; import com.jaamsim.basicsim.ObjectType; import com.jaamsim.controllers.RenderManager; import com.jaamsim.render.Future; import com.jaamsim.render.RenderUtils; public class EntityPallet extends JFrame implements DragGestureListener { private static EntityPallet myInstance; // only one instance allowed to be open private final JScrollPane treeView; private final JTree tree; private final DefaultMutableTreeNode top; private final DefaultTreeModel treeModel; private EntityPallet() { super( "Model Builder" ); setType(Type.UTILITY); setAutoRequestFocus(false); // Make the x button do the same as the close button setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE); addWindowListener(FrameBox.getCloseListener("ShowModelBuilder")); tree = new MyTree(); tree.setRootVisible(false); tree.setShowsRootHandles(true); DragSource dragSource = new DragSource(); dragSource.createDefaultDragGestureRecognizer(tree, DnDConstants.ACTION_COPY, this); top = EntityPallet.createTree(); treeModel = new DefaultTreeModel(top); tree.setModel(treeModel); tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION ); // Create the tree scroll pane and add the tree to it treeView = new JScrollPane( tree ); getContentPane().add( treeView ); tree.setRowHeight(25); tree.setCellRenderer(new TreeCellRenderer()); ToolTipManager.sharedInstance().registerComponent(tree); ToolTipManager.sharedInstance().setDismissDelay(600000); setLocation(GUIFrame.COL1_START, GUIFrame.TOP_START); setSize(GUIFrame.COL1_WIDTH, GUIFrame.HALF_TOP); } @Override public void dragGestureRecognized(DragGestureEvent event) { TreePath path = tree.getSelectionPath(); if (path != null) { // Dragged node is a DefaultMutableTreeNode if(path.getLastPathComponent() instanceof DefaultMutableTreeNode) { DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode) path.getLastPathComponent(); // This is an ObjectType node if(treeNode.getUserObject() instanceof ObjectType) { ObjectType type = (ObjectType) treeNode.getUserObject(); Cursor cursor = null; if (event.getDragAction() == 
DnDConstants.ACTION_COPY) { cursor = DragSource.DefaultCopyDrop; } if (RenderManager.isGood()) { // The new renderer is initialized RenderManager.inst().startDragAndDrop(type); event.startDrag(cursor,new TransferableObjectType(type), RenderManager.inst()); } else { event.startDrag(cursor,new TransferableObjectType(type)); } } } } } private static DefaultMutableTreeNode createTree() { // Create a tree that allows one selection at a time DefaultMutableTreeNode root = new DefaultMutableTreeNode(); HashMap<String, DefaultMutableTreeNode> paletteNodes = new HashMap<>(); for (ObjectType type : ObjectType.getAll()) { if (!type.isDragAndDrop()) continue; String pName = type.getPaletteName(); DefaultMutableTreeNode palNode = paletteNodes.get(pName); if (palNode == null) { palNode = new DefaultMutableTreeNode(pName, true); paletteNodes.put(pName, palNode); root.add(palNode); } DefaultMutableTreeNode classNode = new DefaultMutableTreeNode(type, true); palNode.add(classNode); } return root; } public synchronized static EntityPallet getInstance() { if (myInstance == null) myInstance = new EntityPallet(); return myInstance; } /** * Disposes the only instance of the entity pallet */ public static void clear() { if (myInstance != null) { myInstance.dispose(); myInstance = null; } } private static final Runnable notifier = new PalletNotifier(); private static final class PalletNotifier implements Runnable { @Override public void run() { EntityPallet.getInstance().repaint(); } } private static class TreeCellRenderer extends DefaultTreeCellRenderer { private final ImageIcon icon = new ImageIcon(); @Override public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { super.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); // If not a leaf, just return if (!leaf) return this; // If we don't find an ObjectType (likely we will) just return Object userObj = ((DefaultMutableTreeNode)value).getUserObject(); if (!(userObj instanceof ObjectType)) return this; ObjectType type = (ObjectType)userObj; this.setText(type.getName()); if (!RenderManager.isGood()) return this; if (type.getIconImage() != null) { icon.setImage(type.getIconImage()); this.setIcon(icon); return this; } ArrayList<DisplayModel> dm = type.getDefaultDisplayModel(); if (dm.isEmpty()) return this; Future<BufferedImage> fi = RenderManager.inst().getPreviewForDisplayModel(dm.get(0), notifier); if (fi.failed() || !fi.isDone()) return this; icon.setImage(RenderUtils.scaleToRes(fi.get(), 24, 24)); this.setIcon(icon); return this; } } static class MyTree extends JTree { public MyTree() { } /* * override getToolTipText to control what to display */ @Override public String getToolTipText(MouseEvent e) { if(this.getPathForLocation(e.getX(), e.getY()) == null) { return null; } // Obtain the node under the mouse DefaultMutableTreeNode node = (DefaultMutableTreeNode)this.getPathForLocation(e.getX(), e.getY()).getLastPathComponent(); if(node == null) { return null; } Object object = node.getUserObject(); // It is a leaf node if (!(object instanceof ObjectType)) { return null; } ObjectType ot = (ObjectType)object; return GUIFrame.formatToolTip(ot.getName(), ot.getDescription(0)); } } private final static DataFlavor OBJECT_TYPE_FLAVOR; static { try { // Create OBJECT_TYPE_FLAVOR String objectTypeFlavor = DataFlavor.javaJVMLocalObjectMimeType + ";class=" + TransferableObjectType.class.getName(); OBJECT_TYPE_FLAVOR = new 
DataFlavor(objectTypeFlavor); } catch (ClassNotFoundException ex) { throw new RuntimeException(ex); } } private static class TransferableObjectType implements Transferable { private final ObjectType type; TransferableObjectType(ObjectType type) { this.type = type; } @Override public DataFlavor [] getTransferDataFlavors() { return new DataFlavor [] {OBJECT_TYPE_FLAVOR}; } @Override public boolean isDataFlavorSupported(DataFlavor flavor) { return OBJECT_TYPE_FLAVOR.equals(flavor); } @Override public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException { if (flavor.equals(OBJECT_TYPE_FLAVOR)) { return type; } else { throw new UnsupportedFlavorException(flavor); } } } }
src/main/java/com/jaamsim/ui/EntityPallet.java
/* * JaamSim Discrete Event Simulation * Copyright (C) 2011 Ausenco Engineering Canada Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ package com.jaamsim.ui; import java.awt.Component; import java.awt.Cursor; import java.awt.Dimension; import java.awt.datatransfer.DataFlavor; import java.awt.datatransfer.Transferable; import java.awt.datatransfer.UnsupportedFlavorException; import java.awt.dnd.DnDConstants; import java.awt.dnd.DragGestureEvent; import java.awt.dnd.DragGestureListener; import java.awt.dnd.DragSource; import java.awt.event.MouseEvent; import java.awt.image.BufferedImage; import java.util.ArrayList; import java.util.HashMap; import javax.swing.ImageIcon; import javax.swing.JFrame; import javax.swing.JScrollPane; import javax.swing.JTree; import javax.swing.ToolTipManager; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeCellRenderer; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; import com.jaamsim.DisplayModels.DisplayModel; import com.jaamsim.basicsim.ObjectType; import com.jaamsim.controllers.RenderManager; import com.jaamsim.render.Future; import com.jaamsim.render.RenderUtils; public class EntityPallet extends JFrame implements DragGestureListener { private static EntityPallet myInstance; // only one instance allowed to be open private final JScrollPane treeView; private final JTree tree; private final DefaultMutableTreeNode top; private final DefaultTreeModel treeModel; private EntityPallet() { super( "Model Builder" ); setType(Type.UTILITY); setAutoRequestFocus(false); // Make the x button do the same as the close button setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE); addWindowListener(FrameBox.getCloseListener("ShowModelBuilder")); tree = new MyTree(); tree.setRootVisible(false); tree.setShowsRootHandles(true); DragSource dragSource = new DragSource(); dragSource.createDefaultDragGestureRecognizer(tree, DnDConstants.ACTION_COPY, this); top = EntityPallet.createTree(); treeModel = new DefaultTreeModel(top); tree.setModel(treeModel); tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION ); // Create the tree scroll pane and add the tree to it treeView = new JScrollPane( tree ); getContentPane().add( treeView ); tree.setRowHeight(25); tree.setCellRenderer(new TreeCellRenderer()); ToolTipManager.sharedInstance().registerComponent(tree); ToolTipManager.sharedInstance().setDismissDelay(600000); setLocation(GUIFrame.COL1_START, GUIFrame.TOP_START); setSize(GUIFrame.COL1_WIDTH, GUIFrame.HALF_TOP); } @Override public void dragGestureRecognized(DragGestureEvent event) { TreePath path = tree.getSelectionPath(); if (path != null) { // Dragged node is a DefaultMutableTreeNode if(path.getLastPathComponent() instanceof DefaultMutableTreeNode) { DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode) path.getLastPathComponent(); // This is an ObjectType node if(treeNode.getUserObject() instanceof ObjectType) { ObjectType type = (ObjectType) treeNode.getUserObject(); Cursor cursor = null; if 
(event.getDragAction() == DnDConstants.ACTION_COPY) { cursor = DragSource.DefaultCopyDrop; } if (RenderManager.isGood()) { // The new renderer is initialized RenderManager.inst().startDragAndDrop(type); event.startDrag(cursor,new TransferableObjectType(type), RenderManager.inst()); } else { event.startDrag(cursor,new TransferableObjectType(type)); } } } } } private static DefaultMutableTreeNode createTree() { // Create a tree that allows one selection at a time DefaultMutableTreeNode root = new DefaultMutableTreeNode(); HashMap<String, DefaultMutableTreeNode> paletteNodes = new HashMap<>(); for (ObjectType type : ObjectType.getAll()) { if (!type.isDragAndDrop()) continue; String pName = type.getPaletteName(); DefaultMutableTreeNode palNode = paletteNodes.get(pName); if (palNode == null) { palNode = new DefaultMutableTreeNode(pName, true); paletteNodes.put(pName, palNode); root.add(palNode); } DefaultMutableTreeNode classNode = new DefaultMutableTreeNode(type, true); palNode.add(classNode); } return root; } public synchronized static EntityPallet getInstance() { if (myInstance == null) myInstance = new EntityPallet(); return myInstance; } /** * Disposes the only instance of the entity pallet */ public static void clear() { if (myInstance != null) { myInstance.dispose(); myInstance = null; } } private static final Dimension prefSize = new Dimension(220, 24); private static final Runnable notifier = new PalletNotifier(); private static final class PalletNotifier implements Runnable { @Override public void run() { EntityPallet.getInstance().repaint(); } } private static class TreeCellRenderer extends DefaultTreeCellRenderer { private final ImageIcon icon = new ImageIcon(); @Override public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { super.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); // If not a leaf, just return if (!leaf) return this; // If we don't find an ObjectType (likely we will) just return Object userObj = ((DefaultMutableTreeNode)value).getUserObject(); if (!(userObj instanceof ObjectType)) return this; ObjectType type = (ObjectType)userObj; this.setText(type.getName()); this.setPreferredSize(prefSize); if (!RenderManager.isGood()) return this; if (type.getIconImage() != null) { icon.setImage(type.getIconImage()); this.setIcon(icon); return this; } ArrayList<DisplayModel> dm = type.getDefaultDisplayModel(); if (dm.isEmpty()) return this; Future<BufferedImage> fi = RenderManager.inst().getPreviewForDisplayModel(dm.get(0), notifier); if (fi.failed() || !fi.isDone()) return this; icon.setImage(RenderUtils.scaleToRes(fi.get(), 24, 24)); this.setIcon(icon); return this; } } static class MyTree extends JTree { public MyTree() { } /* * override getToolTipText to control what to display */ @Override public String getToolTipText(MouseEvent e) { if(this.getPathForLocation(e.getX(), e.getY()) == null) { return null; } // Obtain the node under the mouse DefaultMutableTreeNode node = (DefaultMutableTreeNode)this.getPathForLocation(e.getX(), e.getY()).getLastPathComponent(); if(node == null) { return null; } Object object = node.getUserObject(); // It is a leaf node if (!(object instanceof ObjectType)) { return null; } ObjectType ot = (ObjectType)object; return GUIFrame.formatToolTip(ot.getName(), ot.getDescription(0)); } } private final static DataFlavor OBJECT_TYPE_FLAVOR; static { try { // Create OBJECT_TYPE_FLAVOR String objectTypeFlavor = 
DataFlavor.javaJVMLocalObjectMimeType + ";class=" + TransferableObjectType.class.getName(); OBJECT_TYPE_FLAVOR = new DataFlavor(objectTypeFlavor); } catch (ClassNotFoundException ex) { throw new RuntimeException(ex); } } private static class TransferableObjectType implements Transferable { private final ObjectType type; TransferableObjectType(ObjectType type) { this.type = type; } @Override public DataFlavor [] getTransferDataFlavors() { return new DataFlavor [] {OBJECT_TYPE_FLAVOR}; } @Override public boolean isDataFlavorSupported(DataFlavor flavor) { return OBJECT_TYPE_FLAVOR.equals(flavor); } @Override public Object getTransferData(DataFlavor flavor) throws UnsupportedFlavorException { if (flavor.equals(OBJECT_TYPE_FLAVOR)) { return type; } else { throw new UnsupportedFlavorException(flavor); } } } }
JS: Use the default selection size for Model Builder

Signed-off-by: Harry King <[email protected]>
Signed-off-by: Stephen Wong <[email protected]>
src/main/java/com/jaamsim/ui/EntityPallet.java
JS: Use the default selection size for Model Builder
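Read as a diff, the only functional change in this record is that the old EntityPallet pinned every tree cell to a fixed Dimension(220, 24) via setPreferredSize(prefSize) inside the cell renderer, while the new version drops that field and call so Swing sizes the selection box from the rendered text and icon. The standalone sketch below (illustrative names, not JaamSim code) contrasts the two renderer variants under that assumption; swap which renderer is installed to see the difference in the selection highlight.

// Minimal sketch: a renderer that forces a fixed preferred size versus one that
// keeps the default selection size computed from the label's text and icon.
import java.awt.Component;
import java.awt.Dimension;
import javax.swing.JFrame;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;

public class RendererSizeDemo {

    /** Old behaviour: every cell is forced into a 220 x 24 box. */
    static class FixedSizeRenderer extends DefaultTreeCellRenderer {
        private static final Dimension PREF_SIZE = new Dimension(220, 24);

        @Override
        public Component getTreeCellRendererComponent(JTree tree, Object value,
                boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
            super.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
            setPreferredSize(PREF_SIZE); // selection highlight always spans 220 px
            return this;
        }
    }

    /** New behaviour: no setPreferredSize call, so the selection box hugs the content. */
    static class DefaultSizeRenderer extends DefaultTreeCellRenderer {
        @Override
        public Component getTreeCellRendererComponent(JTree tree, Object value,
                boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
            return super.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
        }
    }

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            DefaultMutableTreeNode root = new DefaultMutableTreeNode("Palette");
            root.add(new DefaultMutableTreeNode("EntityA"));
            root.add(new DefaultMutableTreeNode("EntityWithAMuchLongerName"));

            JTree tree = new JTree(root);
            tree.setRowHeight(25);
            tree.setCellRenderer(new DefaultSizeRenderer()); // swap in FixedSizeRenderer to compare

            JFrame frame = new JFrame("Renderer size demo");
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.add(new JScrollPane(tree));
            frame.setSize(300, 200);
            frame.setVisible(true);
        });
    }
}

With the explicit preferred size removed, long entity names are no longer clipped to a fixed width and the selection highlight matches the default JTree behaviour, which appears to be the intent of the commit message.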
Java
apache-2.0
e82a84a98747fc51c737ae575431f28b69a6f4bf
0
raviagarwal7/buck,robbertvanginkel/buck,Learn-Android-app/buck,janicduplessis/buck,hgl888/buck,vschs007/buck,dsyang/buck,rowillia/buck,davido/buck,luiseduardohdbackup/buck,nguyentruongtho/buck,MarkRunWu/buck,Dominator008/buck,bocon13/buck,Dominator008/buck,daedric/buck,Learn-Android-app/buck,mikekap/buck,romanoid/buck,darkforestzero/buck,1yvT0s/buck,clonetwin26/buck,Addepar/buck,lukw00/buck,Addepar/buck,jackminicloud/buck,neonichu/buck,vine/buck,raviagarwal7/buck,Dominator008/buck,zhuxiaohao/buck,liuyang-li/buck,mikekap/buck,denizt/buck,brettwooldridge/buck,pwz3n0/buck,grumpyjames/buck,marcinkwiatkowski/buck,illicitonion/buck,artiya4u/buck,JoelMarcey/buck,Dominator008/buck,OkBuilds/buck,lukw00/buck,k21/buck,marcinkwiatkowski/buck,tgummerer/buck,zhan-xiong/buck,Distrotech/buck,dsyang/buck,GerritCodeReview/buck,romanoid/buck,mnuessler/buck,grumpyjames/buck,kageiit/buck,nguyentruongtho/buck,zhuxiaohao/buck,GerritCodeReview/buck,siddhartharay007/buck,marcinkwiatkowski/buck,dsyang/buck,siddhartharay007/buck,janicduplessis/buck,k21/buck,illicitonion/buck,bocon13/buck,liuyang-li/buck,hgl888/buck,k21/buck,siddhartharay007/buck,shybovycha/buck,illicitonion/buck,zpao/buck,davido/buck,ilya-klyuchnikov/buck,davido/buck,raviagarwal7/buck,illicitonion/buck,thinkernel/buck,vine/buck,sdwilsh/buck,bocon13/buck,luiseduardohdbackup/buck,siddhartharay007/buck,mikekap/buck,dushmis/buck,marcinkwiatkowski/buck,luiseduardohdbackup/buck,rowillia/buck,mread/buck,mnuessler/buck,stuhood/buck,Dominator008/buck,hgl888/buck,robbertvanginkel/buck,belomx/open_tools,MarkRunWu/buck,shs96c/buck,daedric/buck,dushmis/buck,illicitonion/buck,mikekap/buck,janicduplessis/buck,dsyang/buck,hgl888/buck,siddhartharay007/buck,shs96c/buck,facebook/buck,vine/buck,sdwilsh/buck,zhuxiaohao/buck,LegNeato/buck,darkforestzero/buck,stuhood/buck,belomx/open_tools,Addepar/buck,artiya4u/buck,raviagarwal7/buck,ilya-klyuchnikov/buck,dsyang/buck,Learn-Android-app/buck,clonetwin26/buck,lukw00/buck,facebook/buck,denizt/buck,OkBuilds/buck,SeleniumHQ/buck,vine/buck,janicduplessis/buck,ilya-klyuchnikov/buck,stuhood/buck,1yvT0s/buck,OkBuilds/buck,k21/buck,dpursehouse/buck,mogers/buck,mikekap/buck,neonichu/buck,shs96c/buck,davido/buck,rmaz/buck,JoelMarcey/buck,Addepar/buck,daedric/buck,stuhood/buck,JoelMarcey/buck,rhencke/buck,tgummerer/buck,belomx/open_tools,LegNeato/buck,romanoid/buck,illicitonion/buck,zhan-xiong/buck,Dominator008/buck,janicduplessis/buck,OkBuilds/buck,darkforestzero/buck,artiya4u/buck,darkforestzero/buck,1yvT0s/buck,OkBuilds/buck,stuhood/buck,daedric/buck,justinmuller/buck,marcinkwiatkowski/buck,Heart2009/buck,rmaz/buck,shybovycha/buck,siddhartharay007/buck,grumpyjames/buck,Learn-Android-app/buck,tgummerer/buck,rowillia/buck,Distrotech/buck,zhuxiaohao/buck,romanoid/buck,brettwooldridge/buck,Heart2009/buck,shs96c/buck,clonetwin26/buck,mikekap/buck,darkforestzero/buck,rhencke/buck,Dominator008/buck,mikekap/buck,robbertvanginkel/buck,k21/buck,dsyang/buck,k21/buck,davido/buck,luiseduardohdbackup/buck,ilya-klyuchnikov/buck,zhan-xiong/buck,Distrotech/buck,darkforestzero/buck,SeleniumHQ/buck,bocon13/buck,janicduplessis/buck,raviagarwal7/buck,mikekap/buck,pwz3n0/buck,Addepar/buck,rmaz/buck,Distrotech/buck,lukw00/buck,sdwilsh/buck,justinmuller/buck,JoelMarcey/buck,pwz3n0/buck,thinkernel/buck,grumpyjames/buck,hgl888/buck,liuyang-li/buck,rhencke/buck,raviagarwal7/buck,facebook/buck,neonichu/buck,mogers/buck,mnuessler/buck,clonetwin26/buck,Heart2009/buck,justinmuller/buck,romanoid/buck,nguyentruongtho/buck,brettwooldridge/buck,MarkRunWu/buck,davido/b
uck,justinmuller/buck,OkBuilds/buck,sdwilsh/buck,Addepar/buck,vine/buck,robbertvanginkel/buck,kageiit/buck,janicduplessis/buck,pwz3n0/buck,MarkRunWu/buck,shs96c/buck,clonetwin26/buck,shs96c/buck,shs96c/buck,thinkernel/buck,justinmuller/buck,marcinkwiatkowski/buck,zhan-xiong/buck,liuyang-li/buck,romanoid/buck,darkforestzero/buck,marcinkwiatkowski/buck,siddhartharay007/buck,JoelMarcey/buck,Addepar/buck,denizt/buck,justinmuller/buck,marcinkwiatkowski/buck,LegNeato/buck,clonetwin26/buck,Distrotech/buck,lukw00/buck,Addepar/buck,k21/buck,dushmis/buck,shybovycha/buck,daedric/buck,MarkRunWu/buck,Dominator008/buck,shs96c/buck,zhan-xiong/buck,JoelMarcey/buck,pwz3n0/buck,robbertvanginkel/buck,ilya-klyuchnikov/buck,vschs007/buck,dsyang/buck,saleeh93/buck-cutom,clonetwin26/buck,rmaz/buck,saleeh93/buck-cutom,raviagarwal7/buck,lukw00/buck,ilya-klyuchnikov/buck,k21/buck,stuhood/buck,davido/buck,sdwilsh/buck,rhencke/buck,davido/buck,mogers/buck,SeleniumHQ/buck,illicitonion/buck,mread/buck,clonetwin26/buck,janicduplessis/buck,rowillia/buck,Dominator008/buck,dpursehouse/buck,pwz3n0/buck,grumpyjames/buck,dpursehouse/buck,Heart2009/buck,zhan-xiong/buck,Addepar/buck,justinmuller/buck,shybovycha/buck,Learn-Android-app/buck,JoelMarcey/buck,romanoid/buck,luiseduardohdbackup/buck,davido/buck,justinmuller/buck,ilya-klyuchnikov/buck,liuyang-li/buck,GerritCodeReview/buck,robbertvanginkel/buck,rmaz/buck,ilya-klyuchnikov/buck,justinmuller/buck,clonetwin26/buck,hgl888/buck,artiya4u/buck,1yvT0s/buck,shybovycha/buck,k21/buck,grumpyjames/buck,rmaz/buck,mogers/buck,shybovycha/buck,dpursehouse/buck,rmaz/buck,rowillia/buck,grumpyjames/buck,davido/buck,raviagarwal7/buck,bocon13/buck,darkforestzero/buck,mnuessler/buck,shybovycha/buck,luiseduardohdbackup/buck,rhencke/buck,kageiit/buck,Dominator008/buck,bocon13/buck,Heart2009/buck,1yvT0s/buck,JoelMarcey/buck,stuhood/buck,robbertvanginkel/buck,zpao/buck,rowillia/buck,saleeh93/buck-cutom,clonetwin26/buck,1yvT0s/buck,bocon13/buck,davido/buck,justinmuller/buck,mnuessler/buck,illicitonion/buck,SeleniumHQ/buck,Heart2009/buck,Dominator008/buck,rmaz/buck,JoelMarcey/buck,k21/buck,tgummerer/buck,dsyang/buck,pwz3n0/buck,rhencke/buck,daedric/buck,luiseduardohdbackup/buck,neonichu/buck,lukw00/buck,illicitonion/buck,marcinkwiatkowski/buck,Distrotech/buck,robbertvanginkel/buck,kageiit/buck,brettwooldridge/buck,pwz3n0/buck,shybovycha/buck,dushmis/buck,Addepar/buck,illicitonion/buck,nguyentruongtho/buck,OkBuilds/buck,davido/buck,mread/buck,mnuessler/buck,vine/buck,brettwooldridge/buck,dushmis/buck,k21/buck,Distrotech/buck,dpursehouse/buck,lukw00/buck,zhan-xiong/buck,romanoid/buck,vschs007/buck,raviagarwal7/buck,artiya4u/buck,bocon13/buck,daedric/buck,dushmis/buck,zhuxiaohao/buck,thinkernel/buck,rowillia/buck,OkBuilds/buck,mogers/buck,nguyentruongtho/buck,marcinkwiatkowski/buck,hgl888/buck,jackminicloud/buck,bocon13/buck,mnuessler/buck,dsyang/buck,LegNeato/buck,vine/buck,luiseduardohdbackup/buck,rmaz/buck,kageiit/buck,shs96c/buck,stuhood/buck,rowillia/buck,vschs007/buck,mikekap/buck,artiya4u/buck,Addepar/buck,illicitonion/buck,siddhartharay007/buck,siddhartharay007/buck,liuyang-li/buck,artiya4u/buck,romanoid/buck,zhuxiaohao/buck,MarkRunWu/buck,shybovycha/buck,mogers/buck,darkforestzero/buck,Learn-Android-app/buck,vine/buck,sdwilsh/buck,belomx/open_tools,dsyang/buck,mogers/buck,OkBuilds/buck,rhencke/buck,bocon13/buck,daedric/buck,rhencke/buck,pwz3n0/buck,tgummerer/buck,1yvT0s/buck,robbertvanginkel/buck,rmaz/buck,shs96c/buck,romanoid/buck,zpao/buck,robbertvanginkel/buck,JoelMarcey/buck,rmaz/buck,janic
duplessis/buck,tgummerer/buck,zhan-xiong/buck,liuyang-li/buck,dushmis/buck,bocon13/buck,zhan-xiong/buck,robbertvanginkel/buck,dpursehouse/buck,zpao/buck,Heart2009/buck,SeleniumHQ/buck,dushmis/buck,shs96c/buck,artiya4u/buck,ilya-klyuchnikov/buck,JoelMarcey/buck,artiya4u/buck,lukw00/buck,illicitonion/buck,k21/buck,grumpyjames/buck,zhan-xiong/buck,brettwooldridge/buck,sdwilsh/buck,darkforestzero/buck,Heart2009/buck,LegNeato/buck,shybovycha/buck,Addepar/buck,Heart2009/buck,SeleniumHQ/buck,zpao/buck,brettwooldridge/buck,ilya-klyuchnikov/buck,ilya-klyuchnikov/buck,1yvT0s/buck,MarkRunWu/buck,clonetwin26/buck,brettwooldridge/buck,OkBuilds/buck,marcinkwiatkowski/buck,justinmuller/buck,tgummerer/buck,SeleniumHQ/buck,pwz3n0/buck,dpursehouse/buck,LegNeato/buck,SeleniumHQ/buck,mogers/buck,mogers/buck,vschs007/buck,stuhood/buck,rhencke/buck,denizt/buck,rhencke/buck,SeleniumHQ/buck,vschs007/buck,justinmuller/buck,romanoid/buck,tgummerer/buck,rowillia/buck,pwz3n0/buck,robbertvanginkel/buck,Distrotech/buck,zpao/buck,romanoid/buck,nguyentruongtho/buck,mread/buck,shs96c/buck,Dominator008/buck,ilya-klyuchnikov/buck,grumpyjames/buck,zhuxiaohao/buck,Learn-Android-app/buck,raviagarwal7/buck,zhuxiaohao/buck,GerritCodeReview/buck,LegNeato/buck,shybovycha/buck,brettwooldridge/buck,rhencke/buck,sdwilsh/buck,liuyang-li/buck,kageiit/buck,Distrotech/buck,lukw00/buck,vine/buck,marcinkwiatkowski/buck,Learn-Android-app/buck,justinmuller/buck,zhan-xiong/buck,robbertvanginkel/buck,vschs007/buck,hgl888/buck,clonetwin26/buck,SeleniumHQ/buck,jackminicloud/buck,mikekap/buck,JoelMarcey/buck,illicitonion/buck,artiya4u/buck,vschs007/buck,k21/buck,mogers/buck,1yvT0s/buck,facebook/buck,janicduplessis/buck,Learn-Android-app/buck,janicduplessis/buck,luiseduardohdbackup/buck,artiya4u/buck,LegNeato/buck,neonichu/buck,mread/buck,mogers/buck,grumpyjames/buck,clonetwin26/buck,LegNeato/buck,shs96c/buck,sdwilsh/buck,OkBuilds/buck,daedric/buck,liuyang-li/buck,rmaz/buck,dsyang/buck,OkBuilds/buck,neonichu/buck,SeleniumHQ/buck,zhuxiaohao/buck,sdwilsh/buck,brettwooldridge/buck,facebook/buck,neonichu/buck,LegNeato/buck,OkBuilds/buck,darkforestzero/buck,sdwilsh/buck,SeleniumHQ/buck,vschs007/buck,rowillia/buck,tgummerer/buck,ilya-klyuchnikov/buck,tgummerer/buck,liuyang-li/buck,vschs007/buck,mnuessler/buck,LegNeato/buck,brettwooldridge/buck,daedric/buck,LegNeato/buck,rmaz/buck,hgl888/buck,davido/buck,saleeh93/buck-cutom,sdwilsh/buck,darkforestzero/buck,bocon13/buck,liuyang-li/buck,dsyang/buck,saleeh93/buck-cutom,mread/buck,shybovycha/buck,shybovycha/buck,facebook/buck,Addepar/buck,neonichu/buck,darkforestzero/buck,facebook/buck,SeleniumHQ/buck,JoelMarcey/buck,jackminicloud/buck,stuhood/buck,vschs007/buck,daedric/buck,raviagarwal7/buck,LegNeato/buck,zpao/buck,dushmis/buck,vschs007/buck,Learn-Android-app/buck,tgummerer/buck,zhan-xiong/buck,stuhood/buck,dpursehouse/buck,mikekap/buck,saleeh93/buck-cutom,grumpyjames/buck,vine/buck,Learn-Android-app/buck,rowillia/buck,zhan-xiong/buck,Distrotech/buck,dsyang/buck,sdwilsh/buck,mikekap/buck,grumpyjames/buck,brettwooldridge/buck,raviagarwal7/buck,raviagarwal7/buck,neonichu/buck,marcinkwiatkowski/buck,brettwooldridge/buck,Distrotech/buck,daedric/buck,janicduplessis/buck,MarkRunWu/buck,daedric/buck,denizt/buck,romanoid/buck,kageiit/buck,vschs007/buck,dushmis/buck,rowillia/buck,lukw00/buck,mnuessler/buck,vine/buck,nguyentruongtho/buck
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.httpserver; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; /** * A WebSocket server that reports events of buck. */ public class WebServer { private final int port; private final Server server; private final StreamingWebSocketServlet streamingWebSocketServlet; public WebServer(int port) { this.port = port; this.server = new Server(port); this.streamingWebSocketServlet = new StreamingWebSocketServlet(); } public int getPort() { return port; } public WebServerBuckEventListener createListener() { return new WebServerBuckEventListener(this); } public StreamingWebSocketServlet getStreamingWebSocketServlet() { return streamingWebSocketServlet; } public synchronized void start() throws WebServerException { if (server.isStarted()) { return; } // Create a handler that acts as a WebSocket server. ServletContextHandler servletContextHandler = new ServletContextHandler( /* parent */ server, /* contextPath */ "/comet", /* sessions */ true, /* security */ false); servletContextHandler.addServlet(new ServletHolder(streamingWebSocketServlet), "/echo"); // Package up all of the handlers into a ContextHandlerCollection to serve as the handler for // the server. ContextHandlerCollection contexts = new ContextHandlerCollection(); Handler[] handlers = new Handler[] {servletContextHandler}; contexts.setHandlers(handlers); server.setHandler(contexts); try { server.start(); } catch (Exception e) { throw new WebServerException("Can not start Websocket server.", e); } } public synchronized void stop() throws WebServerException { if (!server.isRunning()) { return; } try { server.stop(); } catch (Exception e) { throw new WebServerException("Can not stop Websocket server.", e); } } @SuppressWarnings("serial") public class WebServerException extends Exception { public WebServerException(String message, Exception clause) { super(message, clause); } } }
src/com/facebook/buck/httpserver/WebServer.java
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.httpserver; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; /** * A WebSocket server that reports events of buck. */ public class WebServer { private final int port; private final Server server; private final StreamingWebSocketServlet streamingWebSocketServlet; public WebServer(int port) { this.port = port; this.server = new Server(port); this.streamingWebSocketServlet = new StreamingWebSocketServlet(); } public int getPort() { return port; } public WebServerBuckEventListener createListener() { return new WebServerBuckEventListener(this); } public StreamingWebSocketServlet getStreamingWebSocketServlet() { return streamingWebSocketServlet; } public synchronized void start() throws WebServerException { if (server.isStarted()) { return; } // Create a handler that acts as a WebSocket server. ServletContextHandler servletContextHandler = new ServletContextHandler( /* parent */ server, /* contextPath */ "/comet", /* sessions */ true, /* security */ false); servletContextHandler.addServlet(new ServletHolder(streamingWebSocketServlet), "/echo"); // Package up all of the handlers into a ContextHandlerCollection to serve as the handler for // the server. ContextHandlerCollection contexts = new ContextHandlerCollection(); Handler[] handlers = new Handler[] {servletContextHandler}; contexts.setHandlers(handlers); server.setHandler(contexts); try { server.start(); } catch (Exception e) { throw new WebServerException("Can not start Websocket server.", e); } } public synchronized void stop() throws WebServerException { if (!server.isRunning()) { return; } try { server.stop(); } catch (Exception e) { throw new WebServerException("Can not stop Websocket server.", e); } } public class WebServerException extends Exception { public WebServerException(String message, Exception clause) { super(message, clause); } } }
Add @SuppressWarnings("serial") to an Exception. Test Plan: Eliminates a warning in Eclipse.
src/com/facebook/buck/httpserver/WebServer.java
Add @SuppressWarnings("serial") to an Exception.
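This record adds @SuppressWarnings("serial") to WebServerException and changes nothing else. The warning exists because java.lang.Exception implements java.io.Serializable, so any subclass that declares no serialVersionUID triggers a "serializable class does not declare a static final serialVersionUID" warning in Eclipse (and in javac with -Xlint:serial). A minimal, self-contained sketch of the two common ways to deal with it (class names here are illustrative, not Buck code):

// Minimal sketch: the "serial" lint and the two usual remedies.
public class SerialWarningDemo {

    // Eclipse / javac -Xlint:serial warn here: no serialVersionUID declared.
    public static class NoisyException extends Exception {
        public NoisyException(String message, Exception cause) {
            super(message, cause);
        }
    }

    // Same class with the warning suppressed, as the commit above does.
    @SuppressWarnings("serial")
    public static class QuietException extends Exception {
        public QuietException(String message, Exception cause) {
            super(message, cause);
        }
    }

    // Alternative: declare the field explicitly and keep serialization stable.
    public static class ExplicitException extends Exception {
        private static final long serialVersionUID = 1L;

        public ExplicitException(String message, Exception cause) {
            super(message, cause);
        }
    }

    public static void main(String[] args) {
        try {
            throw new QuietException("demo", new RuntimeException("cause"));
        } catch (QuietException e) {
            System.out.println(e.getMessage() + " <- " + e.getCause());
        }
    }
}

Declaring an explicit serialVersionUID is the other common fix; the annotation is the lighter-weight choice when the exception type is never actually serialized, which seems to be the case for an in-process server wrapper exception like this one.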
Java
apache-2.0
5059cd925bc5b396d92a0f799e152ffdb634d54a
0
JavaMoney/jsr354-ri-bp,JavaMoney/jsr354-ri-bp
/** * Copyright (c) 2012, 2014, Credit Suisse (Anatole Tresch), Werner Keil and others by the @author tag. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.javamoney.moneta; import javax.money.CurrencyUnit; import javax.money.Monetary; import javax.money.MonetaryAmount; import javax.money.MonetaryOperator; import javax.money.MonetaryQuery; import org.junit.Assert; import org.testng.annotations.Test; import java.io.*; import java.lang.invoke.MethodHandles; import java.math.BigDecimal; import java.math.BigInteger; import java.util.logging.Level; import java.util.logging.Logger; import static org.testng.Assert.*; /** * @author Anatole */ public class FastMoneyTest{ private static final Logger LOG = Logger.getLogger(MethodHandles.lookup().lookupClass().getName()); private static final BigDecimal TEN = new BigDecimal(10.0d); protected static final CurrencyUnit EURO = Monetary.getCurrency("EUR"); protected static final CurrencyUnit DOLLAR = Monetary.getCurrency("USD"); /** * Test method for * {@link FastMoney#of(java.lang.Number, javax.money.CurrencyUnit)} . */ @Test public void testOfCurrencyUnitBigDecimal(){ FastMoney m = FastMoney.of(TEN, Monetary.getCurrency("EUR")); assertEquals(new BigDecimal("10").intValue(), m.getNumber().numberValue(BigDecimal.class).intValue()); } @Test public void testOfCurrencyUnitDouble(){ FastMoney m = FastMoney.of(10.0d, Monetary.getCurrency("EUR")); assertTrue(TEN.doubleValue() == m.getNumber().doubleValue()); } /** * Test method for {@link FastMoney#getCurrency()}. */ @Test public void testGetCurrency(){ MonetaryAmount money = FastMoney.of(BigDecimal.TEN, EURO); assertNotNull(money.getCurrency()); assertEquals("EUR", money.getCurrency().getCurrencyCode()); } @Test public void testSubtractMonetaryAmount(){ FastMoney money1 = FastMoney.of(BigDecimal.TEN, EURO); FastMoney money2 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney moneyResult = money1.subtract(money2); assertNotNull(moneyResult); assertEquals(9d, moneyResult.getNumber().doubleValue(), 0d); } @Test public void testDivideAndRemainder_BigDecimal(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney[] divideAndRemainder = money1.divideAndRemainder(new BigDecimal("0.50001")); assertEquals(divideAndRemainder[0].getNumber().numberValue(BigDecimal.class), new BigDecimal("1")); assertEquals(divideAndRemainder[1].getNumber().numberValue(BigDecimal.class), new BigDecimal("0.49999")); } @Test public void testDivideToIntegralValue_BigDecimal(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney result = money1.divideToIntegralValue(new BigDecimal("0.5001")); assertEquals(result.getNumber().numberValue(BigDecimal.class),BigDecimal.ONE); result = money1.divideToIntegralValue(new BigDecimal("0.2001")); assertEquals(result.getNumber().numberValue(BigDecimal.class),BigDecimal.valueOf(4l)); result = money1.divideToIntegralValue(BigDecimal.valueOf(5)); assertTrue(result.getNumber().numberValue(BigDecimal.class).intValueExact() == 0); } /** * Test method for {@link FastMoney#hashCode()}. 
*/ @Test public void testHashCode(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney money2 = FastMoney.of(new BigDecimal("1"), EURO); assertEquals(money1.hashCode(), money2.hashCode()); FastMoney money3 = FastMoney.of(1.0, DOLLAR); assertTrue(money1.hashCode() != money3.hashCode()); assertTrue(money2.hashCode() != money3.hashCode()); FastMoney money4 = FastMoney.of(BigDecimal.ONE, DOLLAR); assertTrue(money1.hashCode() != money4.hashCode()); assertTrue(money2.hashCode() != money4.hashCode()); FastMoney money5 = FastMoney.of(BigDecimal.ONE, DOLLAR); FastMoney money6 = FastMoney.of(1.0, DOLLAR); assertTrue(money1.hashCode() != money5.hashCode()); assertTrue(money2.hashCode() != money5.hashCode()); assertTrue(money1.hashCode() != money6.hashCode()); assertTrue(money2.hashCode() != money6.hashCode()); } /** * Test method for * {@link FastMoney#of(java.lang.Number, javax.money.CurrencyUnit)} . */ @Test public void testOfCurrencyUnitNumber(){ FastMoney m = FastMoney.of((byte) 2, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Byte.valueOf((byte) 2), m.getNumber().numberValue(Byte.class)); m = FastMoney.of((short) -2, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Short.valueOf((short) -2), m.getNumber().numberValue(Short.class)); m = FastMoney.of(-12, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Integer.valueOf(-12), m.getNumber().numberValue(Integer.class)); m = FastMoney.of((long) 12, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(12), m.getNumber().numberValue(Long.class)); m = FastMoney.of((float) 12.23, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals((float) 12.23, m.getNumber().numberValue(Float.class)); m = FastMoney.of(-12.23, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(-12.23, m.getNumber().numberValue(Double.class)); m = FastMoney.of(BigDecimal.valueOf(234.2345), EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(new BigDecimal("234.2345"), m.getNumber().numberValue(BigDecimal.class)); m = FastMoney.of(BigInteger.valueOf(232323123L), DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(232323123L), m.getNumber().numberValue(Long.class)); assertEquals(BigInteger.valueOf(232323123L), m.getNumber().numberValue(BigInteger.class)); } /** * Test method for {@link FastMoney#of(java.lang.Number, java.lang.String)} * . 
*/ @Test public void testOfStringNumber(){ FastMoney m = FastMoney.of((byte) 2, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Byte.valueOf((byte) 2), m.getNumber().numberValue(Byte.class)); m = FastMoney.of((short) -2, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Short.valueOf((short) -2), m.getNumber().numberValue(Short.class)); m = FastMoney.of(-12, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Integer.valueOf(-12), m.getNumber().numberValue(Integer.class)); m = FastMoney.of((long) 12, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(12), m.getNumber().numberValue(Long.class)); m = FastMoney.of((float) 12.23, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals((float) 12.23, m.getNumber().numberValue(Float.class)); m = FastMoney.of(-12.23, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(-12.23, m.getNumber().numberValue(Double.class)); m = FastMoney.of(BigDecimal.valueOf(234.2345), "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(new BigDecimal("234.2345"), m.getNumber().numberValue(BigDecimal.class)); m = FastMoney.of(BigInteger.valueOf(21432432L), "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(21432432L), m.getNumber().numberValue(Long.class)); assertEquals(BigInteger.valueOf(21432432L), m.getNumber().numberValue(BigInteger.class)); } /** * Test method for {@link FastMoney#equals(java.lang.Object)}. */ @Test public void testEqualsObject(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(BigDecimal.ONE, "CHF"), FastMoney.of(BigDecimal.ONE, "XXX"), FastMoney.of(BigDecimal.ONE.negate(), "XXX")}; for(int i = 0; i < moneys.length; i++){ for(int j = 0; j < moneys.length; j++){ if(i == j){ assertEquals(moneys[i], moneys[j]); }else{ assertNotSame(moneys[i], moneys[j]); } } } } /** * Test method for {@link FastMoney#compareTo(javax.money.MonetaryAmount)} * . */ @Test public void testCompareTo(){ FastMoney m1 = FastMoney.of(-2, "CHF"); FastMoney m2 = FastMoney.of(0, "CHF"); FastMoney m3 = FastMoney.of(-0, "CHF"); FastMoney m4 = FastMoney.of(2, "CHF"); assertEquals(0, m2.compareTo(m3)); assertEquals(0, m2.compareTo(m2)); assertEquals(0, m3.compareTo(m3)); assertEquals(0, m3.compareTo(m2)); assertTrue(m1.compareTo(m2) < 0); assertTrue(m2.compareTo(m1) > 0); assertTrue(m1.compareTo(m3) < 0); assertTrue(m2.compareTo(m3) == 0); assertTrue(m1.compareTo(m4) < 0); assertTrue(m3.compareTo(m4) < 0); assertTrue(m4.compareTo(m1) > 0); assertTrue(m4.compareTo(m2) > 0); } /** * Test method for {@link FastMoney#abs()}. */ @Test public void testAbs(){ FastMoney m = FastMoney.of(10, "CHF"); assertEquals(m, m.abs()); assertTrue(m == m.abs()); m = FastMoney.of(0, "CHF"); assertEquals(m, m.abs()); assertTrue(m == m.abs()); m = FastMoney.of(-10, "CHF"); assertEquals(m.negate(), m.abs()); assertTrue(m != m.abs()); // Long.MIN_VALUE * -1 == Long.MIN_VALUE m = FastMoney.of(new BigDecimal(Long.MIN_VALUE).movePointLeft(5), "CHF"); assertFalse(m.isPositiveOrZero()); try { assertTrue(m.abs().isPositiveOrZero(), "FastMoney.abs() >= 0 failed for " + m); } catch (ArithmeticException e) { // could happen } } /** * Test method for {@link FastMoney#add(javax.money.MonetaryAmount)} . 
*/ @Test public void testAdd(){ FastMoney money1 = FastMoney.of(BigDecimal.TEN, EURO); FastMoney money2 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney moneyResult = money1.add(money2); assertNotNull(moneyResult); assertEquals(11d, moneyResult.getNumber().doubleValue(), 0d); FastMoney money3 = FastMoney.of(90000000000000L, "CHF"); try { // the maximum value for FastMoney is 92233720368547.75807 so this should overflow money3.add(money3); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#divide(java.lang.Number)}. */ @Test(expectedExceptions = java.lang.ArithmeticException.class) public void testDivideNumber_Overflow() { FastMoney m = FastMoney.of(100, "CHF"); // the argument exceeds the numeric capabilities but the result will not BigDecimal divisor = new BigDecimal("100000000000000000"); m.divide(divisor); } /** * Test method for {@link FastMoney#divide(java.lang.Number)}. */ @Test public void testDivideNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(BigDecimal.valueOf(5))); // the maximum value for FastMoney is 92233720368547.75807 // so this should fit right below this limit BigDecimal baseValue = new BigDecimal("90000000000"); // the argument exceeds the numeric capabilities but the result will not BigDecimal divisor = new BigDecimal("1000000"); BigDecimal expectedValue = baseValue.divide(divisor); m = FastMoney.of(baseValue, "CHF"); assertEquals(FastMoney.of(expectedValue, "CHF"), m.divide(divisor)); } /** * Test method for {@link FastMoney#divide(long)}. */ @Test public void testDivideLong(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(5L)); } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDividedouble(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(5.0d)); } /** * Test method for {@link FastMoney#divideAndRemainder(java.lang.Number)} . */ @Test public void testDivideAndRemainderNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of( BigDecimal.valueOf(33), "CHF"), m.divideAndRemainder(BigDecimal.valueOf(3))[0] ); assertEquals(FastMoney.of( BigDecimal.valueOf(1), "CHF"), m.divideAndRemainder(BigDecimal.valueOf(3))[1] ); } /** * Test method for * {@link FastMoney#divideToIntegralValue(java.lang.Number)} . */ @Test public void testDivideToIntegralValueNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of( BigDecimal.valueOf(5), "CHF"), m.divideToIntegralValue(BigDecimal.valueOf(20)) ); assertEquals(FastMoney.of( BigDecimal.valueOf(33), "CHF"), m.divideToIntegralValue(BigDecimal.valueOf(3)) ); } /** * Test method for {@link FastMoney#multiply(java.lang.Number)}. 
*/ @Test public void testMultiplyNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(10, "CHF"), m.multiply(new BigDecimal("0.1"))); // the maximum value for FastMoney is 92233720368547.75807 // so this should fit right below this limit BigDecimal baseValue = new BigDecimal("90000000000000"); BigDecimal expectedValue = new BigDecimal("90000000000000.00009"); BigDecimal multiplicant = new BigDecimal("1.000000000000000001"); // verify the expected results assertEquals(0, expectedValue.compareTo(baseValue.multiply(multiplicant))); m = FastMoney.of(baseValue, "CHF"); try { m.multiply(baseValue); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#multiply(long)}. */ @Test public void testMultiplyLong(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(400, "CHF"), m.multiply(4)); assertEquals(FastMoney.of(200, "CHF"), m.multiply(2)); assertEquals(FastMoney.of(new BigDecimal("50.0"), "CHF"), m.multiply(0.5)); // Zero test m = FastMoney.of(100, "CHF"); assertEquals( m.multiply(0), FastMoney.of(0, "CHF")); m = FastMoney.of(0, "CHF"); assertEquals( m.multiply(10), FastMoney.of(0, "CHF")); try { // the maximum value for FastMoney is 92233720368547.75807 so this should overflow FastMoney.of(90000000000000L, "CHF").multiply(90000000000000L); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } try { // the maximum value for FastMoney is 92233720368547.75807 // these values are lower, but the overflow detection does not work // correct. FastMoney.of(-53484567177043L, "CHF").multiply(2178802625L); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDouble(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(new BigDecimal("50.0"), "CHF"), m.multiply(0.5)); } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoublePositiveInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.POSITIVE_INFINITY); fail("multiplying with POSITIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with POSITIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoubleNegativeInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.NEGATIVE_INFINITY); fail("multiplying with NEGATIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NEGATIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoubleNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.NaN); fail("multiplying with NaN should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NaN fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. 
*/ @Test public void testMultiplyNumberPositiveInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.POSITIVE_INFINITY)); fail("multiplying with POSITIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with POSITIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. */ @Test public void testMultiplyNumberNegativeInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.NEGATIVE_INFINITY)); fail("multiplying with NEGATIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NEGATIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. */ @Test public void testMultiplyNumberNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.NaN)); fail("multiplying with NaN should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NaN fails as expected", e); } } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDivideBadNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.divide(Double.NaN); fail("dividing by NaN should not be allowed"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "dividing by NaN fails as expected", e); } try { m.divide(Double.valueOf(Double.NaN)); fail("dividing by h NaN should not be allowed"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "dividing by NaN fails as expected", e); } } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDivideInfinityDoubles() { double[] values = new double[]{Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY}; FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); for (double d : values) { assertTrue(m.divide(d).isZero()); assertTrue(m.divide(Double.valueOf(d)).isZero()); } } /** * Test method for {@link FastMoney#negate()}. */ @Test public void testNegate(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(-100, "CHF"), m.negate()); m = FastMoney.of(-123.234, "CHF"); assertEquals(FastMoney.of(123.234, "CHF"), m.negate()); // Long.MIN_VALUE * -1 == Long.MIN_VALUE m = FastMoney.of(new BigDecimal(Long.MIN_VALUE).movePointLeft(5), "CHF"); assertTrue(m.isNegative()); try { assertFalse(m.negate().isNegative(), "FastMoney.negate() < 0 failed for " + m); } catch (ArithmeticException e) { // should happen } m = FastMoney.of(0, "CHF"); assertEquals(m.negate(), FastMoney.of(0, "CHF")); } /** * Test method for {@link FastMoney#plus()}. */ @Test public void testPlus(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(100, "CHF"), m.plus()); m = FastMoney.of(123.234, "CHF"); assertEquals(FastMoney.of(123.234, "CHF"), m.plus()); } /** * Test method for {@link FastMoney#subtract(javax.money.MonetaryAmount)} . 
*/ @Test public void testSubtract(){ FastMoney m = FastMoney.of(100, "CHF"); FastMoney s1 = FastMoney.of(100, "CHF"); FastMoney s2 = FastMoney.of(200, "CHF"); FastMoney s3 = FastMoney.of(0, "CHF"); assertEquals(FastMoney.of(0, "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(-100, "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(100, "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(s3)); m = FastMoney.of(-123.234, "CHF"); assertEquals(FastMoney.of(new BigDecimal("-223.234"), "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(new BigDecimal("-323.234"), "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(new BigDecimal("-123.234"), "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(s3)); m = FastMoney.of(12.40234, "CHF"); s1 = FastMoney.of(2343.45, "CHF"); s2 = FastMoney.of(12.40234, "CHF"); s3 = FastMoney.of(-2343.45, "CHF"); assertEquals(FastMoney.of(new BigDecimal("12.40234").subtract(new BigDecimal("2343.45")), "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(new BigDecimal("12.402345534").subtract(new BigDecimal("12.402345534")), "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(0, "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(new BigDecimal("2355.85234"), "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(FastMoney.of(0, "CHF"))); } /** * Test method for {@link FastMoney#remainder(java.lang.Number)} . */ @Test public void testRemainderNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(10.50))) .create(), m.remainder(10.50), "Invalid remainder of " + 10.50 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(-30.20))) .create(), m.remainder(-30.20), "Invalid remainder of " + -30.20 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(-3))) .create(), m.remainder(-3),"Invalid remainder of " + -3 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(3))).create(), m.remainder(3), "Invalid remainder of " + 3 ); } } /** * Test method for {@link FastMoney#scaleByPowerOfTen(int)} . */ @Test public void testScaleByPowerOfTen(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ for(int p = 0; p < 3; p++){ assertEquals(m.getFactory().setNumber(m.getNumber().numberValue(BigDecimal.class).scaleByPowerOfTen(p)) .create(), m.scaleByPowerOfTen(p), "Invalid scaleByPowerOfTen." ); } } moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.32, "CHF")}; for(FastMoney m : moneys){ for(int p = -2; p < 0; p++){ assertEquals(m.getFactory().setNumber(m.getNumber().numberValue(BigDecimal.class).scaleByPowerOfTen(p)) .create(), m.scaleByPowerOfTen(p), "Invalid scaleByPowerOfTen." ); } } } /** * Test method for {@link FastMoney#isZero()}. 
*/ @Test public void testIsZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isZero()); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isZero()); } } /** * Test method for {@link FastMoney#isPositive()}. */ @Test public void testIsPositive(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isPositive()); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isPositive()); } } /** * Test method for {@link FastMoney#isPositiveOrZero()} . */ @Test public void testIsPositiveOrZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isPositiveOrZero(), "Invalid positiveOrZero (expected true): " + m); } moneys = new FastMoney[]{FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isPositiveOrZero(), "Invalid positiveOrZero (expected false): " + m); } } /** * Test method for {@link FastMoney#isNegative()}. */ @Test public void testIsNegative(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isNegative(), "Invalid isNegative (expected false): " + m); } moneys = new FastMoney[]{FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isNegative(), "Invalid isNegative (expected true): " + m); } } /** * Test method for {@link FastMoney#isNegativeOrZero()} . */ @Test public void testIsNegativeOrZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isNegativeOrZero(), "Invalid negativeOrZero (expected false): " + m); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isNegativeOrZero(), "Invalid negativeOrZero (expected true): " + m); } } /** * Test method for {@link FastMoney#getFactory()#setNumber(java.lang.Number)} . 
*/ @Test public void testWithNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(new BigDecimal("23123213.435"), "CHF"), FastMoney.of(new BigDecimal("-23123213.435"), "CHF"), FastMoney.of(-23123213, "CHF"), FastMoney.of(0, "CHF")}; FastMoney s = FastMoney.of(10, "CHF"); MonetaryAmount[] moneys2 = new MonetaryAmount[]{s.getFactory().setNumber(100).create(), s.getFactory().setNumber(34242344).create(), s.getFactory().setNumber(new BigDecimal("23123213.435")).create(), s.getFactory().setNumber(new BigDecimal("-23123213.435")).create(), s.getFactory().setNumber(-23123213).create(), s.getFactory().setNumber(0).create()}; for(int i = 0; i < moneys.length; i++){ assertEquals(moneys[i], moneys2[i], "with(Number) failed."); } } /** * Test method for * {@link FastMoney#getFactory()#setCurrency(javax.money.CurrencyUnit)} and {@link org * .javamoney.moneta.FastMoney#getFactory()#setNumber(java.lang.Number)} . */ @Test public void testWithCurrencyUnitNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; FastMoney s = FastMoney.of(10, "XXX"); MonetaryAmount[] moneys2 = new MonetaryAmount[]{ s.getFactory().setCurrency(Monetary.getCurrency("CHF")).setNumber(100).create(), s.getFactory().setCurrency(Monetary.getCurrency("USD")).setNumber(34242344).create(), s.getFactory().setCurrency(Monetary.getCurrency("EUR")) .setNumber(new BigDecimal("23123213.435")).create(), s.getFactory().setCurrency(Monetary.getCurrency("USS")) .setNumber(new BigDecimal("-23123213.435")).create(), s.getFactory().setCurrency(Monetary.getCurrency("USN")).setNumber(-23123213).create(), s.getFactory().setCurrency(Monetary.getCurrency("GBP")).setNumber(0).create()}; for(int i = 0; i < moneys.length; i++){ assertEquals(moneys[i], moneys2[i], "with(Number) failed."); } } /** * Test method for {@link FastMoney#getScale()}. */ @Test public void testGetScale(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for(FastMoney m : moneys){ assertEquals(5, m.getScale(), "Scale for " + m); } } /** * Test method for {@link FastMoney#getPrecision()}. */ @Test public void testGetPrecision(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(111, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for(FastMoney m : moneys){ assertEquals(m.getNumber().numberValue(BigDecimal.class).precision(), m .getPrecision(), "Precision for " + m); } } /** * Test method for {@link FastMoney#getNumber()#longValue()}. 
*/ @Test(expectedExceptions = ArithmeticException.class) public void testLongValue(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-100.3434, "CHF"); assertEquals(-100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(100.3434, "CHF"); assertEquals(100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney .of(new BigDecimal( "12121762517652176251725178251872652765321876352187635217835378125"), "CHF"); fail("longValue(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } /** * Test method for {@link FastMoney#getNumber()#longValueExact()}. */ @Test public void testLongValueExact(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0L, m.getNumber().longValueExact(), "longValue of " + m); try{ m = FastMoney.of(Long.MAX_VALUE, "CHF"); fail("longValueExact(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } catch(ArithmeticException e){ // OK } try{ m = FastMoney.of(Long.MIN_VALUE, "CHF"); fail("longValueExact(-100.3434) should raise an ArithmeticException."); } catch(ArithmeticException e){ // OK } try{ m = FastMoney.of(100.3434, "CHF"); m.getNumber().longValueExact(); fail("longValueExact(100.3434) should raise an ArithmeticException."); } catch(ArithmeticException e){ // OK } } /** * Test method for {@link FastMoney#getNumber()#doubleValue()}. */ @Test(expectedExceptions = ArithmeticException.class) public void testDoubleValue(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-100.3434, "CHF"); assertEquals(-100.3434, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(100.3434, "CHF"); assertEquals(100.3434, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney .of(new BigDecimal( "12121762517652176251725178251872652765321876352187635217835378125"), "CHF"); m.getNumber().doubleValue(); fail("doubleValue(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } /** * Test method for {@link FastMoney#signum()}. 
*/ @Test public void testSignum(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(1, m.signum(), "signum of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-1, m.signum(), "signum of " + m); m = FastMoney.of(100.3435, "CHF"); assertEquals(1, m.signum(), "signum of " + m); m = FastMoney.of(-100.3435, "CHF"); assertEquals(-1, m.signum(), "signum of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0, m.signum(), "signum of " + m); m = FastMoney.of(-0, "CHF"); assertEquals(0, m.signum(), "signum of " + m); } /** * Test method for {@link FastMoney#isLessThan(javax.money.MonetaryAmount)} * . */ @Test public void testIsLessThan(){ assertFalse(FastMoney.of(BigDecimal.valueOf(0d), "CHF").isLessThan(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isLessThan(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertFalse(FastMoney.of(15, "CHF").isLessThan(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(15.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); assertTrue(FastMoney.of(5, "CHF").isLessThan(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(5.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isLessThanOrEqualTo(javax.money.MonetaryAmount)} . */ @Test public void testIsLessThanOrEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF") .isLessThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isLessThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertFalse(FastMoney.of(15, "CHF").isLessThanOrEqualTo(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(15.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); assertTrue(FastMoney.of(5, "CHF").isLessThanOrEqualTo(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(5.546, "CHF").isLessThanOrEqualTo(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isGreaterThan(javax.money.MonetaryAmount)} . */ @Test public void testIsGreaterThan(){ assertFalse( FastMoney.of(BigDecimal.valueOf(0d), "CHF").isGreaterThan(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isGreaterThan(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(15, "CHF").isGreaterThan(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(15.546, "CHF").isGreaterThan(FastMoney.of(10.34, "CHF"))); assertFalse(FastMoney.of(5, "CHF").isGreaterThan(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(5.546, "CHF").isGreaterThan(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isGreaterThanOrEqualTo(javax.money.MonetaryAmount)} . */ @Test public void testIsGreaterThanOrEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF") .isGreaterThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isGreaterThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(15, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(15.546, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10.34, "CHF"))); assertFalse(FastMoney.of(5, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(5.546, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10.34, "CHF"))); } /** * Test method for {@link FastMoney#isEqualTo(javax.money.MonetaryAmount)} * . 
*/ @Test public void testIsEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(5d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(5), "CHF"))); assertTrue( FastMoney.of(BigDecimal.valueOf(1d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(1.00), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(1d), "CHF").isEqualTo(FastMoney.of(BigDecimal.ONE, "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(1), "CHF").isEqualTo(FastMoney.of(BigDecimal.ONE, "CHF"))); assertTrue( FastMoney.of(new BigDecimal("1.0000"), "CHF").isEqualTo(FastMoney.of(new BigDecimal("1.00"), "CHF"))); } /** * Test method for {@link FastMoney#getNumber()}. */ @Test public void testGetImplementationType(){ assertEquals(FastMoney.of(0, "CHF").getContext().getAmountType(), FastMoney.class); assertEquals(FastMoney.of(0.34746d, "CHF").getContext().getAmountType(), FastMoney.class); assertEquals(FastMoney.of(100034L, "CHF").getContext().getAmountType(), FastMoney.class); } /** * Test method for {@link FastMoney#query(javax.money.MonetaryQuery)}. */ @Test public void testQuery(){ MonetaryQuery<Integer> q = new MonetaryQuery<Integer>() { @Override public Integer queryFrom(MonetaryAmount amount) { return FastMoney.from(amount).getPrecision(); } }; FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for (FastMoney money : moneys) { assertEquals(money.query(q), (Integer) money.getPrecision()); } } /** * Test method for {@link FastMoney#getNumber()#asType(java.lang.Class)}. */ @Test public void testGetNumberClassOfT(){ FastMoney m = FastMoney.of(13.656, "CHF"); assertEquals(m.getNumber().numberValue(Byte.class), Byte.valueOf((byte) 13)); assertEquals(m.getNumber().numberValue(Short.class), Short.valueOf((short) 13)); assertEquals(m.getNumber().numberValue(Integer.class), Integer.valueOf(13)); assertEquals(m.getNumber().numberValue(Long.class), Long.valueOf(13L)); assertEquals(m.getNumber().numberValue(Float.class), 13.656f); assertEquals(m.getNumber().numberValue(Double.class), 13.656); assertEquals(m.getNumber().numberValue(BigDecimal.class), new BigDecimal("13.656")); } /** * Test method for {@link FastMoney#getNumber()#asNumber()}. */ @Test public void testGetNumber(){ assertEquals(BigDecimal.ZERO, FastMoney.of(0, "CHF").getNumber().numberValue(BigDecimal.class)); assertEquals(new BigDecimal("100034"), FastMoney.of(100034L, "CHF").getNumber().numberValue(BigDecimal.class)); assertEquals(new BigDecimal("0.34738"), FastMoney.of(new BigDecimal("0.34738"), "CHF").getNumber().numberValue(BigDecimal.class)); } /** * Test method for {@link FastMoney#toString()}. */ @Test public void testToString(){ assertEquals("XXX 1.23455", FastMoney.of(new BigDecimal("1.23455"), "XXX").toString()); assertEquals("CHF 1234.00000", FastMoney.of(1234, "CHF").toString()); assertEquals("CHF 1234.00000", FastMoney.of(new BigDecimal("1234.0"), "CHF").toString()); assertEquals("CHF 1234.10000", FastMoney.of(new BigDecimal("1234.1"), "CHF").toString()); assertEquals("CHF 0.01000", FastMoney.of(new BigDecimal("0.0100"), "CHF").toString()); } /** * Test method for {@link FastMoney#with(javax.money.MonetaryOperator)} . 
*/ @Test public void testWithMonetaryOperator(){ MonetaryOperator adj = new MonetaryOperator(){ @Override public MonetaryAmount apply(MonetaryAmount amount) { return FastMoney.of(-100, amount.getCurrency()); } }; FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); FastMoney a = m.with(adj); assertNotNull(a); assertNotSame(m, a); assertEquals(m.getCurrency(), a.getCurrency()); assertEquals(FastMoney.of(-100, m.getCurrency()), a); adj = new MonetaryOperator(){ @Override public MonetaryAmount apply(MonetaryAmount amount) { return amount.multiply(2).getFactory().setCurrency(Monetary.getCurrency("CHF")).create(); } }; a = m.with(adj); assertNotNull(a); assertNotSame(m, a); assertEquals(Monetary.getCurrency("CHF"), a.getCurrency()); assertEquals(FastMoney.of(1.2345 * 2, a.getCurrency()), a); } /** * Test method for {@link FastMoney#from(javax.money.MonetaryAmount)}. */ @Test public void testFrom(){ FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); FastMoney m2 = FastMoney.from(m); assertTrue(m == m2); Money fm = Money.of(new BigDecimal("1.2345"), "XXX"); m2 = FastMoney.from(fm); assertFalse(m == m2); assertEquals(m, m2); } @Test public void testSerialization() throws IOException, ClassNotFoundException{ FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(m); oos.flush(); ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); FastMoney m2 = (FastMoney) ois.readObject(); assertEquals(m, m2); assertTrue(m != m2); } @Test public void parseTest() { FastMoney money = FastMoney.parse("EUR 25.25"); assertEquals(money.getCurrency(), EURO); assertEquals(money.getNumber().doubleValue(), 25.25); } /** * Test method for {@link Money#from(javax.money.MonetaryAmount)}. 
*/ @Test public void testFromInversed(){ Money m = Money.of(new BigDecimal("1.2345"), "XXX"); Money m2 = Money.from(m); assertTrue(m == m2); FastMoney fm = FastMoney.of(new BigDecimal("1.2345"), "XXX"); m2 = Money.from(fm); assertFalse(m == m2); assertEquals(m, m2); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoubleNan(){ FastMoney.of(Double.NaN, "XXX"); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoublePositiveInfinity(){ FastMoney.of(Double.POSITIVE_INFINITY, "XXX"); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoubleNegativeInfinity(){ FastMoney.of(Double.NEGATIVE_INFINITY, "XXX"); } @Test(expectedExceptions = NullPointerException.class) public void shouldRerturnErrorWhenUsingZeroTheCurrencyIsNull() { FastMoney.zero(null); Assert.fail(); } @Test public void shouldRerturnZeroWhenUsingZero() { MonetaryAmount zero = FastMoney.zero(DOLLAR); assertEquals(BigDecimal.ZERO, zero.getNumber().numberValue(BigDecimal.class)); assertEquals(DOLLAR, zero.getCurrency()); } @Test(expectedExceptions = NullPointerException.class) public void shouldRerturnErrorWhenUsingOfMinorTheCurrencyIsNull() { FastMoney.ofMinor(null, 1234L); Assert.fail(); } @Test public void shouldRerturnMonetaryAmount() { MonetaryAmount amount = FastMoney.ofMinor(DOLLAR, 1234L); assertEquals(Double.valueOf(12.34), amount.getNumber().doubleValue()); assertEquals(DOLLAR, amount.getCurrency()); } @Test(expectedExceptions = IllegalArgumentException.class) public void shouldReturnErrorWhenCurrencyIsInvalid() { FastMoney.ofMinor(new InvalidCurrency(), 1234L); } @Test(expectedExceptions = IllegalArgumentException.class) public void shouldReturnErrorWhenFractionDigitIsNegative() { FastMoney.ofMinor(DOLLAR, 1234L, -2); } @Test public void shouldRerturnMonetaryAmountUsingFractionDigits() { MonetaryAmount amount = FastMoney.ofMinor(DOLLAR, 1234L, 3); assertEquals(Double.valueOf(1.234), amount.getNumber().doubleValue()); assertEquals(DOLLAR, amount.getCurrency()); } }
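Taken together, the tests above also serve as usage documentation for the FastMoney API (factory methods, arithmetic, the fixed scale of 5, parsing). The following minimal, standalone sketch is only an illustration distilled from that behaviour, not part of the original test suite; it assumes the javamoney "moneta" implementation and the javax.money API on the classpath, and the class name FastMoneyUsageSketch is invented for this example.

import javax.money.CurrencyUnit;
import javax.money.Monetary;
import javax.money.MonetaryAmount;

import org.javamoney.moneta.FastMoney;

// Illustrative sketch of the behaviour exercised by the tests above.
public class FastMoneyUsageSketch {

    public static void main(String[] args) {
        CurrencyUnit chf = Monetary.getCurrency("CHF");

        FastMoney price = FastMoney.of(100, chf);          // stored internally with a fixed scale of 5
        MonetaryAmount discounted = price.multiply(0.5)    // CHF 50.00000
                .subtract(FastMoney.of(10, chf));          // CHF 40.00000

        System.out.println(discounted);                    // prints "CHF 40.00000"
        System.out.println(FastMoney.parse("EUR 25.25"));  // prints "EUR 25.25000"
    }
}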
src/test/java/org/javamoney/moneta/FastMoneyTest.java
/** * Copyright (c) 2012, 2014, Credit Suisse (Anatole Tresch), Werner Keil and others by the @author tag. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.javamoney.moneta; import javax.money.CurrencyUnit; import javax.money.Monetary; import javax.money.MonetaryAmount; import javax.money.MonetaryOperator; import javax.money.MonetaryQuery; import org.junit.Assert; import org.testng.annotations.Test; import java.io.*; import java.lang.invoke.MethodHandles; import java.math.BigDecimal; import java.math.BigInteger; import java.util.logging.Level; import java.util.logging.Logger; import static org.testng.Assert.*; /** * @author Anatole */ public class FastMoneyTest{ private static final Logger LOG = Logger.getLogger(MethodHandles.lookup().lookupClass().getName()); private static final BigDecimal TEN = new BigDecimal(10.0d); protected static final CurrencyUnit EURO = Monetary.getCurrency("EUR"); protected static final CurrencyUnit DOLLAR = Monetary.getCurrency("USD"); /** * Test method for * {@link FastMoney#of(java.lang.Number, javax.money.CurrencyUnit)} . */ @Test public void testOfCurrencyUnitBigDecimal(){ FastMoney m = FastMoney.of(TEN, Monetary.getCurrency("EUR")); assertEquals(new BigDecimal("10").intValue(), m.getNumber().numberValue(BigDecimal.class).intValue()); } @Test public void testOfCurrencyUnitDouble(){ FastMoney m = FastMoney.of(10.0d, Monetary.getCurrency("EUR")); assertTrue(TEN.doubleValue() == m.getNumber().doubleValue()); } /** * Test method for {@link FastMoney#getCurrency()}. */ @Test public void testGetCurrency(){ MonetaryAmount money = FastMoney.of(BigDecimal.TEN, EURO); assertNotNull(money.getCurrency()); assertEquals("EUR", money.getCurrency().getCurrencyCode()); } @Test public void testSubtractMonetaryAmount(){ FastMoney money1 = FastMoney.of(BigDecimal.TEN, EURO); FastMoney money2 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney moneyResult = money1.subtract(money2); assertNotNull(moneyResult); assertEquals(9d, moneyResult.getNumber().doubleValue(), 0d); } @Test public void testDivideAndRemainder_BigDecimal(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney[] divideAndRemainder = money1.divideAndRemainder(new BigDecimal("0.50001")); assertEquals(divideAndRemainder[0].getNumber().numberValue(BigDecimal.class), new BigDecimal("1")); assertEquals(divideAndRemainder[1].getNumber().numberValue(BigDecimal.class), new BigDecimal("0.49999")); } @Test public void testDivideToIntegralValue_BigDecimal(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney result = money1.divideToIntegralValue(new BigDecimal("0.5001")); assertEquals(result.getNumber().numberValue(BigDecimal.class),BigDecimal.ONE); result = money1.divideToIntegralValue(new BigDecimal("0.2001")); assertEquals(result.getNumber().numberValue(BigDecimal.class),BigDecimal.valueOf(4l)); result = money1.divideToIntegralValue(BigDecimal.valueOf(5)); assertTrue(result.getNumber().numberValue(BigDecimal.class).intValueExact() == 0); } /** * Test method for {@link FastMoney#hashCode()}. 
*/ @Test public void testHashCode(){ FastMoney money1 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney money2 = FastMoney.of(new BigDecimal("1"), EURO); assertEquals(money1.hashCode(), money2.hashCode()); FastMoney money3 = FastMoney.of(1.0, DOLLAR); assertTrue(money1.hashCode() != money3.hashCode()); assertTrue(money2.hashCode() != money3.hashCode()); FastMoney money4 = FastMoney.of(BigDecimal.ONE, DOLLAR); assertTrue(money1.hashCode() != money4.hashCode()); assertTrue(money2.hashCode() != money4.hashCode()); FastMoney money5 = FastMoney.of(BigDecimal.ONE, DOLLAR); FastMoney money6 = FastMoney.of(1.0, DOLLAR); assertTrue(money1.hashCode() != money5.hashCode()); assertTrue(money2.hashCode() != money5.hashCode()); assertTrue(money1.hashCode() != money6.hashCode()); assertTrue(money2.hashCode() != money6.hashCode()); } /** * Test method for * {@link FastMoney#of(java.lang.Number, javax.money.CurrencyUnit)} . */ @Test public void testOfCurrencyUnitNumber(){ FastMoney m = FastMoney.of((byte) 2, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Byte.valueOf((byte) 2), m.getNumber().numberValue(Byte.class)); m = FastMoney.of((short) -2, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Short.valueOf((short) -2), m.getNumber().numberValue(Short.class)); m = FastMoney.of(-12, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Integer.valueOf(-12), m.getNumber().numberValue(Integer.class)); m = FastMoney.of((long) 12, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(12), m.getNumber().numberValue(Long.class)); m = FastMoney.of((float) 12.23, EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals((float) 12.23, m.getNumber().numberValue(Float.class)); m = FastMoney.of(-12.23, DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(-12.23, m.getNumber().numberValue(Double.class)); m = FastMoney.of(BigDecimal.valueOf(234.2345), EURO); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(new BigDecimal("234.2345"), m.getNumber().numberValue(BigDecimal.class)); m = FastMoney.of(BigInteger.valueOf(232323123L), DOLLAR); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(232323123L), m.getNumber().numberValue(Long.class)); assertEquals(BigInteger.valueOf(232323123L), m.getNumber().numberValue(BigInteger.class)); } /** * Test method for {@link FastMoney#of(java.lang.Number, java.lang.String)} * . 
*/ @Test public void testOfStringNumber(){ FastMoney m = FastMoney.of((byte) 2, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Byte.valueOf((byte) 2), m.getNumber().numberValue(Byte.class)); m = FastMoney.of((short) -2, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Short.valueOf((short) -2), m.getNumber().numberValue(Short.class)); m = FastMoney.of(-12, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(Integer.valueOf(-12), m.getNumber().numberValue(Integer.class)); m = FastMoney.of((long) 12, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(12), m.getNumber().numberValue(Long.class)); m = FastMoney.of((float) 12.23, "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals((float) 12.23, m.getNumber().numberValue(Float.class)); m = FastMoney.of(-12.23, "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(-12.23, m.getNumber().numberValue(Double.class)); m = FastMoney.of(BigDecimal.valueOf(234.2345), "EUR"); assertNotNull(m); assertEquals(EURO, m.getCurrency()); assertEquals(new BigDecimal("234.2345"), m.getNumber().numberValue(BigDecimal.class)); m = FastMoney.of(BigInteger.valueOf(21432432L), "USD"); assertNotNull(m); assertEquals(DOLLAR, m.getCurrency()); assertEquals(Long.valueOf(21432432L), m.getNumber().numberValue(Long.class)); assertEquals(BigInteger.valueOf(21432432L), m.getNumber().numberValue(BigInteger.class)); } /** * Test method for {@link FastMoney#equals(java.lang.Object)}. */ @Test public void testEqualsObject(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(BigDecimal.ONE, "CHF"), FastMoney.of(BigDecimal.ONE, "XXX"), FastMoney.of(BigDecimal.ONE.negate(), "XXX")}; for(int i = 0; i < moneys.length; i++){ for(int j = 0; j < moneys.length; j++){ if(i == j){ assertEquals(moneys[i], moneys[j]); }else{ assertNotSame(moneys[i], moneys[j]); } } } } /** * Test method for {@link FastMoney#compareTo(javax.money.MonetaryAmount)} * . */ @Test public void testCompareTo(){ FastMoney m1 = FastMoney.of(-2, "CHF"); FastMoney m2 = FastMoney.of(0, "CHF"); FastMoney m3 = FastMoney.of(-0, "CHF"); FastMoney m4 = FastMoney.of(2, "CHF"); assertEquals(0, m2.compareTo(m3)); assertEquals(0, m2.compareTo(m2)); assertEquals(0, m3.compareTo(m3)); assertEquals(0, m3.compareTo(m2)); assertTrue(m1.compareTo(m2) < 0); assertTrue(m2.compareTo(m1) > 0); assertTrue(m1.compareTo(m3) < 0); assertTrue(m2.compareTo(m3) == 0); assertTrue(m1.compareTo(m4) < 0); assertTrue(m3.compareTo(m4) < 0); assertTrue(m4.compareTo(m1) > 0); assertTrue(m4.compareTo(m2) > 0); } /** * Test method for {@link FastMoney#abs()}. */ @Test public void testAbs(){ FastMoney m = FastMoney.of(10, "CHF"); assertEquals(m, m.abs()); assertTrue(m == m.abs()); m = FastMoney.of(0, "CHF"); assertEquals(m, m.abs()); assertTrue(m == m.abs()); m = FastMoney.of(-10, "CHF"); assertEquals(m.negate(), m.abs()); assertTrue(m != m.abs()); // Long.MIN_VALUE * -1 == Long.MIN_VALUE m = FastMoney.of(new BigDecimal(Long.MIN_VALUE).movePointLeft(5), "CHF"); assertFalse(m.isPositiveOrZero()); try { assertTrue(m.abs().isPositiveOrZero(), "FastMoney.abs() >= 0 failed for " + m); } catch (ArithmeticException e) { // could happen } } /** * Test method for {@link FastMoney#add(javax.money.MonetaryAmount)} . 
*/ @Test public void testAdd(){ FastMoney money1 = FastMoney.of(BigDecimal.TEN, EURO); FastMoney money2 = FastMoney.of(BigDecimal.ONE, EURO); FastMoney moneyResult = money1.add(money2); assertNotNull(moneyResult); assertEquals(11d, moneyResult.getNumber().doubleValue(), 0d); FastMoney money3 = FastMoney.of(90000000000000L, "CHF"); try { // the maximum value for FastMoney is 92233720368547.75807 so this should overflow money3.add(money3); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#divide(java.lang.Number)}. */ @Test(expectedExceptions = java.lang.ArithmeticException.class) public void testDivideNumber_Overflow() { FastMoney m = FastMoney.of(100, "CHF"); // the argument exceeds the numeric capabilities but the result will not BigDecimal divisor = new BigDecimal("100000000000000000"); m.divide(divisor); } /** * Test method for {@link FastMoney#divide(java.lang.Number)}. */ @Test public void testDivideNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(BigDecimal.valueOf(5))); // the maximum value for FastMoney is 92233720368547.75807 // so this should fit right below this limit BigDecimal baseValue = new BigDecimal("90000000000"); // the argument exceeds the numeric capabilities but the result will not BigDecimal divisor = new BigDecimal("1000000"); BigDecimal expectedValue = baseValue.divide(divisor); m = FastMoney.of(baseValue, "CHF"); assertEquals(FastMoney.of(expectedValue, "CHF"), m.divide(divisor)); } /** * Test method for {@link FastMoney#divide(long)}. */ @Test public void testDivideLong(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(5L)); } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDividedouble(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(BigDecimal.valueOf(20), "CHF"), m.divide(5.0d)); } /** * Test method for {@link FastMoney#divideAndRemainder(java.lang.Number)} . */ @Test public void testDivideAndRemainderNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of( BigDecimal.valueOf(33), "CHF"), m.divideAndRemainder(BigDecimal.valueOf(3))[0] ); assertEquals(FastMoney.of( BigDecimal.valueOf(1), "CHF"), m.divideAndRemainder(BigDecimal.valueOf(3))[1] ); } /** * Test method for * {@link FastMoney#divideToIntegralValue(java.lang.Number)} . */ @Test public void testDivideToIntegralValueNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of( BigDecimal.valueOf(5), "CHF"), m.divideToIntegralValue(BigDecimal.valueOf(20)) ); assertEquals(FastMoney.of( BigDecimal.valueOf(33), "CHF"), m.divideToIntegralValue(BigDecimal.valueOf(3)) ); } /** * Test method for {@link FastMoney#multiply(java.lang.Number)}. 
*/ @Test public void testMultiplyNumber(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(10, "CHF"), m.multiply(new BigDecimal("0.1"))); // the maximum value for FastMoney is 92233720368547.75807 // so this should fit right below this limit BigDecimal baseValue = new BigDecimal("90000000000000"); BigDecimal expectedValue = new BigDecimal("90000000000000.00009"); BigDecimal multiplicant = new BigDecimal("1.000000000000000001"); // verify the expected results assertEquals(0, expectedValue.compareTo(baseValue.multiply(multiplicant))); m = FastMoney.of(baseValue, "CHF"); try { m.multiply(baseValue); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#multiply(long)}. */ @Test public void testMultiplyLong(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(400, "CHF"), m.multiply(4)); assertEquals(FastMoney.of(200, "CHF"), m.multiply(2)); assertEquals(FastMoney.of(new BigDecimal("50.0"), "CHF"), m.multiply(0.5)); // Zero test m = FastMoney.of(100, "CHF"); assertEquals( m.multiply(0), FastMoney.of(0, "CHF")); m = FastMoney.of(0, "CHF"); assertEquals( m.multiply(10), FastMoney.of(0, "CHF")); try { // the maximum value for FastMoney is 92233720368547.75807 so this should overflow FastMoney.of(90000000000000L, "CHF").multiply(90000000000000L); fail("overflow should raise ArithmeticException"); } catch (ArithmeticException e) { // should happen } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDouble(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(new BigDecimal("50.0"), "CHF"), m.multiply(0.5)); } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoublePositiveInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.POSITIVE_INFINITY); fail("multiplying with POSITIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with POSITIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoubleNegativeInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.NEGATIVE_INFINITY); fail("multiplying with NEGATIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NEGATIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(double)}. */ @Test public void testMultiplyDoubleNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.NaN); fail("multiplying with NaN should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NaN fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. */ @Test public void testMultiplyNumberPositiveInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.POSITIVE_INFINITY)); fail("multiplying with POSITIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with POSITIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. 
*/ @Test public void testMultiplyNumberNegativeInfinity() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.NEGATIVE_INFINITY)); fail("multiplying with NEGATIVE_INFINITY should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NEGATIVE_INFINITY fails as expected", e); } } /** * Test method for {@link FastMoney#multiply(Number)}. */ @Test public void testMultiplyNumberNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.multiply(Double.valueOf(Double.NaN)); fail("multiplying with NaN should fail"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "multiplying with NaN fails as expected", e); } } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDivideBadNaN() { FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); try { m.divide(Double.NaN); fail("dividing by NaN should not be allowed"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "dividing by NaN fails as expected", e); } try { m.divide(Double.valueOf(Double.NaN)); fail("dividing by h NaN should not be allowed"); } catch (ArithmeticException e) { LOG.log(Level.FINE, "dividing by NaN fails as expected", e); } } /** * Test method for {@link FastMoney#divide(double)}. */ @Test public void testDivideInfinityDoubles() { double[] values = new double[]{Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY}; FastMoney m = FastMoney.of(new BigDecimal("50.0"), "USD"); for (double d : values) { assertTrue(m.divide(d).isZero()); assertTrue(m.divide(Double.valueOf(d)).isZero()); } } /** * Test method for {@link FastMoney#negate()}. */ @Test public void testNegate(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(-100, "CHF"), m.negate()); m = FastMoney.of(-123.234, "CHF"); assertEquals(FastMoney.of(123.234, "CHF"), m.negate()); // Long.MIN_VALUE * -1 == Long.MIN_VALUE m = FastMoney.of(new BigDecimal(Long.MIN_VALUE).movePointLeft(5), "CHF"); assertTrue(m.isNegative()); try { assertFalse(m.negate().isNegative(), "FastMoney.negate() < 0 failed for " + m); } catch (ArithmeticException e) { // should happen } m = FastMoney.of(0, "CHF"); assertEquals(m.negate(), FastMoney.of(0, "CHF")); } /** * Test method for {@link FastMoney#plus()}. */ @Test public void testPlus(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(FastMoney.of(100, "CHF"), m.plus()); m = FastMoney.of(123.234, "CHF"); assertEquals(FastMoney.of(123.234, "CHF"), m.plus()); } /** * Test method for {@link FastMoney#subtract(javax.money.MonetaryAmount)} . 
*/ @Test public void testSubtract(){ FastMoney m = FastMoney.of(100, "CHF"); FastMoney s1 = FastMoney.of(100, "CHF"); FastMoney s2 = FastMoney.of(200, "CHF"); FastMoney s3 = FastMoney.of(0, "CHF"); assertEquals(FastMoney.of(0, "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(-100, "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(100, "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(s3)); m = FastMoney.of(-123.234, "CHF"); assertEquals(FastMoney.of(new BigDecimal("-223.234"), "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(new BigDecimal("-323.234"), "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(new BigDecimal("-123.234"), "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(s3)); m = FastMoney.of(12.40234, "CHF"); s1 = FastMoney.of(2343.45, "CHF"); s2 = FastMoney.of(12.40234, "CHF"); s3 = FastMoney.of(-2343.45, "CHF"); assertEquals(FastMoney.of(new BigDecimal("12.40234").subtract(new BigDecimal("2343.45")), "CHF"), m.subtract(s1)); assertEquals(FastMoney.of(new BigDecimal("12.402345534").subtract(new BigDecimal("12.402345534")), "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(0, "CHF"), m.subtract(s2)); assertEquals(FastMoney.of(new BigDecimal("2355.85234"), "CHF"), m.subtract(s3)); assertTrue(m == m.subtract(FastMoney.of(0, "CHF"))); } /** * Test method for {@link FastMoney#remainder(java.lang.Number)} . */ @Test public void testRemainderNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(10.50))) .create(), m.remainder(10.50), "Invalid remainder of " + 10.50 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(-30.20))) .create(), m.remainder(-30.20), "Invalid remainder of " + -30.20 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(-3))) .create(), m.remainder(-3),"Invalid remainder of " + -3 ); assertEquals(m.getFactory().setNumber( m.getNumber().numberValue(BigDecimal.class).remainder(BigDecimal.valueOf(3))).create(), m.remainder(3), "Invalid remainder of " + 3 ); } } /** * Test method for {@link FastMoney#scaleByPowerOfTen(int)} . */ @Test public void testScaleByPowerOfTen(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ for(int p = 0; p < 3; p++){ assertEquals(m.getFactory().setNumber(m.getNumber().numberValue(BigDecimal.class).scaleByPowerOfTen(p)) .create(), m.scaleByPowerOfTen(p), "Invalid scaleByPowerOfTen." ); } } moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(0, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.32, "CHF")}; for(FastMoney m : moneys){ for(int p = -2; p < 0; p++){ assertEquals(m.getFactory().setNumber(m.getNumber().numberValue(BigDecimal.class).scaleByPowerOfTen(p)) .create(), m.scaleByPowerOfTen(p), "Invalid scaleByPowerOfTen." ); } } } /** * Test method for {@link FastMoney#isZero()}. 
*/ @Test public void testIsZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-723527.36532, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isZero()); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isZero()); } } /** * Test method for {@link FastMoney#isPositive()}. */ @Test public void testIsPositive(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isPositive()); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isPositive()); } } /** * Test method for {@link FastMoney#isPositiveOrZero()} . */ @Test public void testIsPositiveOrZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isPositiveOrZero(), "Invalid positiveOrZero (expected true): " + m); } moneys = new FastMoney[]{FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isPositiveOrZero(), "Invalid positiveOrZero (expected false): " + m); } } /** * Test method for {@link FastMoney#isNegative()}. */ @Test public void testIsNegative(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isNegative(), "Invalid isNegative (expected false): " + m); } moneys = new FastMoney[]{FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isNegative(), "Invalid isNegative (expected true): " + m); } } /** * Test method for {@link FastMoney#isNegativeOrZero()} . */ @Test public void testIsNegativeOrZero(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(23123213.435, "CHF")}; for(FastMoney m : moneys){ assertFalse(m.isNegativeOrZero(), "Invalid negativeOrZero (expected false): " + m); } moneys = new FastMoney[]{FastMoney.of(0, "CHF"), FastMoney.of(0.0, "CHF"), FastMoney.of(BigDecimal.ZERO, "CHF"), FastMoney.of(new BigDecimal("0.00000000000000000"), "CHF"), FastMoney.of(-100, "CHF"), FastMoney.of(-34242344, "CHF"), FastMoney.of(-23123213.435, "CHF")}; for(FastMoney m : moneys){ assertTrue(m.isNegativeOrZero(), "Invalid negativeOrZero (expected true): " + m); } } /** * Test method for {@link FastMoney#getFactory()#setNumber(java.lang.Number)} . 
*/ @Test public void testWithNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "CHF"), FastMoney.of(new BigDecimal("23123213.435"), "CHF"), FastMoney.of(new BigDecimal("-23123213.435"), "CHF"), FastMoney.of(-23123213, "CHF"), FastMoney.of(0, "CHF")}; FastMoney s = FastMoney.of(10, "CHF"); MonetaryAmount[] moneys2 = new MonetaryAmount[]{s.getFactory().setNumber(100).create(), s.getFactory().setNumber(34242344).create(), s.getFactory().setNumber(new BigDecimal("23123213.435")).create(), s.getFactory().setNumber(new BigDecimal("-23123213.435")).create(), s.getFactory().setNumber(-23123213).create(), s.getFactory().setNumber(0).create()}; for(int i = 0; i < moneys.length; i++){ assertEquals(moneys[i], moneys2[i], "with(Number) failed."); } } /** * Test method for * {@link FastMoney#getFactory()#setCurrency(javax.money.CurrencyUnit)} and {@link org * .javamoney.moneta.FastMoney#getFactory()#setNumber(java.lang.Number)} . */ @Test public void testWithCurrencyUnitNumber(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; FastMoney s = FastMoney.of(10, "XXX"); MonetaryAmount[] moneys2 = new MonetaryAmount[]{ s.getFactory().setCurrency(Monetary.getCurrency("CHF")).setNumber(100).create(), s.getFactory().setCurrency(Monetary.getCurrency("USD")).setNumber(34242344).create(), s.getFactory().setCurrency(Monetary.getCurrency("EUR")) .setNumber(new BigDecimal("23123213.435")).create(), s.getFactory().setCurrency(Monetary.getCurrency("USS")) .setNumber(new BigDecimal("-23123213.435")).create(), s.getFactory().setCurrency(Monetary.getCurrency("USN")).setNumber(-23123213).create(), s.getFactory().setCurrency(Monetary.getCurrency("GBP")).setNumber(0).create()}; for(int i = 0; i < moneys.length; i++){ assertEquals(moneys[i], moneys2[i], "with(Number) failed."); } } /** * Test method for {@link FastMoney#getScale()}. */ @Test public void testGetScale(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for(FastMoney m : moneys){ assertEquals(5, m.getScale(), "Scale for " + m); } } /** * Test method for {@link FastMoney#getPrecision()}. */ @Test public void testGetPrecision(){ FastMoney[] moneys = new FastMoney[]{FastMoney.of(111, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for(FastMoney m : moneys){ assertEquals(m.getNumber().numberValue(BigDecimal.class).precision(), m .getPrecision(), "Precision for " + m); } } /** * Test method for {@link FastMoney#getNumber()#longValue()}. 
*/ @Test(expectedExceptions = ArithmeticException.class) public void testLongValue(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-100.3434, "CHF"); assertEquals(-100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(100.3434, "CHF"); assertEquals(100L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0L, m.getNumber().longValue(), "longValue of " + m); m = FastMoney .of(new BigDecimal( "12121762517652176251725178251872652765321876352187635217835378125"), "CHF"); fail("longValue(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } /** * Test method for {@link FastMoney#getNumber()#longValueExact()}. */ @Test public void testLongValueExact(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0L, m.getNumber().longValueExact(), "longValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0L, m.getNumber().longValueExact(), "longValue of " + m); try{ m = FastMoney.of(Long.MAX_VALUE, "CHF"); fail("longValueExact(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } catch(ArithmeticException e){ // OK } try{ m = FastMoney.of(Long.MIN_VALUE, "CHF"); fail("longValueExact(-100.3434) should raise an ArithmeticException."); } catch(ArithmeticException e){ // OK } try{ m = FastMoney.of(100.3434, "CHF"); m.getNumber().longValueExact(); fail("longValueExact(100.3434) should raise an ArithmeticException."); } catch(ArithmeticException e){ // OK } } /** * Test method for {@link FastMoney#getNumber()#doubleValue()}. */ @Test(expectedExceptions = ArithmeticException.class) public void testDoubleValue(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(100d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-100d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-100.3434, "CHF"); assertEquals(-100.3434, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(100.3434, "CHF"); assertEquals(100.3434, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney.of(-0.0, "CHF"); assertEquals(0d, m.getNumber().doubleValue(), 0.0d, "doubleValue of " + m); m = FastMoney .of(new BigDecimal( "12121762517652176251725178251872652765321876352187635217835378125"), "CHF"); m.getNumber().doubleValue(); fail("doubleValue(12121762517652176251725178251872652765321876352187635217835378125) should fail!"); } /** * Test method for {@link FastMoney#signum()}. 
*/ @Test public void testSignum(){ FastMoney m = FastMoney.of(100, "CHF"); assertEquals(1, m.signum(), "signum of " + m); m = FastMoney.of(-100, "CHF"); assertEquals(-1, m.signum(), "signum of " + m); m = FastMoney.of(100.3435, "CHF"); assertEquals(1, m.signum(), "signum of " + m); m = FastMoney.of(-100.3435, "CHF"); assertEquals(-1, m.signum(), "signum of " + m); m = FastMoney.of(0, "CHF"); assertEquals(0, m.signum(), "signum of " + m); m = FastMoney.of(-0, "CHF"); assertEquals(0, m.signum(), "signum of " + m); } /** * Test method for {@link FastMoney#isLessThan(javax.money.MonetaryAmount)} * . */ @Test public void testIsLessThan(){ assertFalse(FastMoney.of(BigDecimal.valueOf(0d), "CHF").isLessThan(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isLessThan(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertFalse(FastMoney.of(15, "CHF").isLessThan(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(15.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); assertTrue(FastMoney.of(5, "CHF").isLessThan(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(5.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isLessThanOrEqualTo(javax.money.MonetaryAmount)} . */ @Test public void testIsLessThanOrEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF") .isLessThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isLessThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertFalse(FastMoney.of(15, "CHF").isLessThanOrEqualTo(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(15.546, "CHF").isLessThan(FastMoney.of(10.34, "CHF"))); assertTrue(FastMoney.of(5, "CHF").isLessThanOrEqualTo(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(5.546, "CHF").isLessThanOrEqualTo(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isGreaterThan(javax.money.MonetaryAmount)} . */ @Test public void testIsGreaterThan(){ assertFalse( FastMoney.of(BigDecimal.valueOf(0d), "CHF").isGreaterThan(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isGreaterThan(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(15, "CHF").isGreaterThan(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(15.546, "CHF").isGreaterThan(FastMoney.of(10.34, "CHF"))); assertFalse(FastMoney.of(5, "CHF").isGreaterThan(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(5.546, "CHF").isGreaterThan(FastMoney.of(10.34, "CHF"))); } /** * Test method for * {@link FastMoney#isGreaterThanOrEqualTo(javax.money.MonetaryAmount)} . */ @Test public void testIsGreaterThanOrEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF") .isGreaterThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isGreaterThanOrEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(15, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10, "CHF"))); assertTrue(FastMoney.of(15.546, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10.34, "CHF"))); assertFalse(FastMoney.of(5, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10, "CHF"))); assertFalse(FastMoney.of(5.546, "CHF").isGreaterThanOrEqualTo(FastMoney.of(10.34, "CHF"))); } /** * Test method for {@link FastMoney#isEqualTo(javax.money.MonetaryAmount)} * . 
*/ @Test public void testIsEqualTo(){ assertTrue(FastMoney.of(BigDecimal.valueOf(0d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(0), "CHF"))); assertFalse(FastMoney.of(BigDecimal.valueOf(0.00001d), "CHF") .isEqualTo(FastMoney.of(BigDecimal.valueOf(0d), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(5d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(5), "CHF"))); assertTrue( FastMoney.of(BigDecimal.valueOf(1d), "CHF").isEqualTo(FastMoney.of(BigDecimal.valueOf(1.00), "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(1d), "CHF").isEqualTo(FastMoney.of(BigDecimal.ONE, "CHF"))); assertTrue(FastMoney.of(BigDecimal.valueOf(1), "CHF").isEqualTo(FastMoney.of(BigDecimal.ONE, "CHF"))); assertTrue( FastMoney.of(new BigDecimal("1.0000"), "CHF").isEqualTo(FastMoney.of(new BigDecimal("1.00"), "CHF"))); } /** * Test method for {@link FastMoney#getNumber()}. */ @Test public void testGetImplementationType(){ assertEquals(FastMoney.of(0, "CHF").getContext().getAmountType(), FastMoney.class); assertEquals(FastMoney.of(0.34746d, "CHF").getContext().getAmountType(), FastMoney.class); assertEquals(FastMoney.of(100034L, "CHF").getContext().getAmountType(), FastMoney.class); } /** * Test method for {@link FastMoney#query(javax.money.MonetaryQuery)}. */ @Test public void testQuery(){ MonetaryQuery<Integer> q = new MonetaryQuery<Integer>() { @Override public Integer queryFrom(MonetaryAmount amount) { return FastMoney.from(amount).getPrecision(); } }; FastMoney[] moneys = new FastMoney[]{FastMoney.of(100, "CHF"), FastMoney.of(34242344, "USD"), FastMoney.of(23123213.435, "EUR"), FastMoney.of(-23123213.435, "USS"), FastMoney.of(-23123213, "USN"), FastMoney.of(0, "GBP")}; for (FastMoney money : moneys) { assertEquals(money.query(q), (Integer) money.getPrecision()); } } /** * Test method for {@link FastMoney#getNumber()#asType(java.lang.Class)}. */ @Test public void testGetNumberClassOfT(){ FastMoney m = FastMoney.of(13.656, "CHF"); assertEquals(m.getNumber().numberValue(Byte.class), Byte.valueOf((byte) 13)); assertEquals(m.getNumber().numberValue(Short.class), Short.valueOf((short) 13)); assertEquals(m.getNumber().numberValue(Integer.class), Integer.valueOf(13)); assertEquals(m.getNumber().numberValue(Long.class), Long.valueOf(13L)); assertEquals(m.getNumber().numberValue(Float.class), 13.656f); assertEquals(m.getNumber().numberValue(Double.class), 13.656); assertEquals(m.getNumber().numberValue(BigDecimal.class), new BigDecimal("13.656")); } /** * Test method for {@link FastMoney#getNumber()#asNumber()}. */ @Test public void testGetNumber(){ assertEquals(BigDecimal.ZERO, FastMoney.of(0, "CHF").getNumber().numberValue(BigDecimal.class)); assertEquals(new BigDecimal("100034"), FastMoney.of(100034L, "CHF").getNumber().numberValue(BigDecimal.class)); assertEquals(new BigDecimal("0.34738"), FastMoney.of(new BigDecimal("0.34738"), "CHF").getNumber().numberValue(BigDecimal.class)); } /** * Test method for {@link FastMoney#toString()}. */ @Test public void testToString(){ assertEquals("XXX 1.23455", FastMoney.of(new BigDecimal("1.23455"), "XXX").toString()); assertEquals("CHF 1234.00000", FastMoney.of(1234, "CHF").toString()); assertEquals("CHF 1234.00000", FastMoney.of(new BigDecimal("1234.0"), "CHF").toString()); assertEquals("CHF 1234.10000", FastMoney.of(new BigDecimal("1234.1"), "CHF").toString()); assertEquals("CHF 0.01000", FastMoney.of(new BigDecimal("0.0100"), "CHF").toString()); } /** * Test method for {@link FastMoney#with(javax.money.MonetaryOperator)} . 
*/ @Test public void testWithMonetaryOperator(){ MonetaryOperator adj = new MonetaryOperator(){ @Override public MonetaryAmount apply(MonetaryAmount amount) { return FastMoney.of(-100, amount.getCurrency()); } }; FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); FastMoney a = m.with(adj); assertNotNull(a); assertNotSame(m, a); assertEquals(m.getCurrency(), a.getCurrency()); assertEquals(FastMoney.of(-100, m.getCurrency()), a); adj = new MonetaryOperator(){ @Override public MonetaryAmount apply(MonetaryAmount amount) { return amount.multiply(2).getFactory().setCurrency(Monetary.getCurrency("CHF")).create(); } }; a = m.with(adj); assertNotNull(a); assertNotSame(m, a); assertEquals(Monetary.getCurrency("CHF"), a.getCurrency()); assertEquals(FastMoney.of(1.2345 * 2, a.getCurrency()), a); } /** * Test method for {@link FastMoney#from(javax.money.MonetaryAmount)}. */ @Test public void testFrom(){ FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); FastMoney m2 = FastMoney.from(m); assertTrue(m == m2); Money fm = Money.of(new BigDecimal("1.2345"), "XXX"); m2 = FastMoney.from(fm); assertFalse(m == m2); assertEquals(m, m2); } @Test public void testSerialization() throws IOException, ClassNotFoundException{ FastMoney m = FastMoney.of(new BigDecimal("1.2345"), "XXX"); ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(m); oos.flush(); ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); FastMoney m2 = (FastMoney) ois.readObject(); assertEquals(m, m2); assertTrue(m != m2); } @Test public void parseTest() { FastMoney money = FastMoney.parse("EUR 25.25"); assertEquals(money.getCurrency(), EURO); assertEquals(money.getNumber().doubleValue(), 25.25); } /** * Test method for {@link Money#from(javax.money.MonetaryAmount)}. 
*/ @Test public void testFromInversed(){ Money m = Money.of(new BigDecimal("1.2345"), "XXX"); Money m2 = Money.from(m); assertTrue(m == m2); FastMoney fm = FastMoney.of(new BigDecimal("1.2345"), "XXX"); m2 = Money.from(fm); assertFalse(m == m2); assertEquals(m, m2); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoubleNan(){ FastMoney.of(Double.NaN, "XXX"); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoublePositiveInfinity(){ FastMoney.of(Double.POSITIVE_INFINITY, "XXX"); } @Test(expectedExceptions = ArithmeticException.class) public void testCreatingFromDoubleNegativeInfinity(){ FastMoney.of(Double.NEGATIVE_INFINITY, "XXX"); } @Test(expectedExceptions = NullPointerException.class) public void shouldRerturnErrorWhenUsingZeroTheCurrencyIsNull() { FastMoney.zero(null); Assert.fail(); } @Test public void shouldRerturnZeroWhenUsingZero() { MonetaryAmount zero = FastMoney.zero(DOLLAR); assertEquals(BigDecimal.ZERO, zero.getNumber().numberValue(BigDecimal.class)); assertEquals(DOLLAR, zero.getCurrency()); } @Test(expectedExceptions = NullPointerException.class) public void shouldRerturnErrorWhenUsingOfMinorTheCurrencyIsNull() { FastMoney.ofMinor(null, 1234L); Assert.fail(); } @Test public void shouldRerturnMonetaryAmount() { MonetaryAmount amount = FastMoney.ofMinor(DOLLAR, 1234L); assertEquals(Double.valueOf(12.34), amount.getNumber().doubleValue()); assertEquals(DOLLAR, amount.getCurrency()); } @Test(expectedExceptions = IllegalArgumentException.class) public void shouldReturnErrorWhenCurrencyIsInvalid() { FastMoney.ofMinor(new InvalidCurrency(), 1234L); } @Test(expectedExceptions = IllegalArgumentException.class) public void shouldReturnErrorWhenFractionDigitIsNegative() { FastMoney.ofMinor(DOLLAR, 1234L, -2); } @Test public void shouldRerturnMonetaryAmountUsingFractionDigits() { MonetaryAmount amount = FastMoney.ofMinor(DOLLAR, 1234L, 3); assertEquals(Double.valueOf(1.234), amount.getNumber().doubleValue()); assertEquals(DOLLAR, amount.getCurrency()); } }
This additional test demonstrates the bug in the overflow detection in the multiplyExact method.
src/test/java/org/javamoney/moneta/FastMoneyTest.java
This additional test demonstrates the bug in the overflow detection in the multiplyExact method.
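The commit message above concerns overflow detection on the scaled long value that backs FastMoney (the tests note a maximum of 92233720368547.75807, i.e. Long.MAX_VALUE at a scale of 5). The sketch below only illustrates the general detection idea with java.lang.Math.multiplyExact; it is not the library's actual implementation, and the class name and values are invented for this example.

// Rough sketch: a plain long multiplication wraps silently on overflow,
// while Math.multiplyExact (JDK 8+) raises an ArithmeticException instead.
public class MultiplyOverflowSketch {

    public static void main(String[] args) {
        long scaledAmount = 9_000_000_000_000_000_000L;  // close to Long.MAX_VALUE (about 9.22e18)
        long factor = 10L;

        long wrapped = scaledAmount * factor;            // wraps around without any error
        System.out.println("plain multiplication wraps to " + wrapped);

        try {
            long exact = Math.multiplyExact(scaledAmount, factor);
            System.out.println("exact result: " + exact);
        } catch (ArithmeticException e) {
            // thrown here, because the exact product does not fit into a long
            System.out.println("overflow detected: " + e);
        }
    }
}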
Java
apache-2.0
be7f8b51448337f19725db363e8e67608b8648dd
0
apache/solr
package org.apache.solr.schema; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.analysis.util.ResourceLoader; import org.apache.lucene.analysis.util.ResourceLoaderAware; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.StorableField; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Filter; import org.apache.lucene.search.FieldValueFilter; import org.apache.lucene.queries.ChainedFilter; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.response.TextResponseWriter; import org.apache.solr.search.QParser; import org.apache.solr.search.SolrConstantScoreQuery; import org.apache.solr.search.function.ValueSourceRangeFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Currency; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Field type for support of monetary values. 
* <p> * See <a href="http://wiki.apache.org/solr/CurrencyField">http://wiki.apache.org/solr/CurrencyField</a> */ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoaderAware { protected static final String PARAM_DEFAULT_CURRENCY = "defaultCurrency"; protected static final String PARAM_RATE_PROVIDER_CLASS = "providerClass"; protected static final Object PARAM_PRECISION_STEP = "precisionStep"; protected static final String DEFAULT_RATE_PROVIDER_CLASS = "solr.FileExchangeRateProvider"; protected static final String DEFAULT_DEFAULT_CURRENCY = "USD"; protected static final String DEFAULT_PRECISION_STEP = "0"; protected static final String FIELD_SUFFIX_AMOUNT_RAW = "_amount_raw"; protected static final String FIELD_SUFFIX_CURRENCY = "_currency"; private IndexSchema schema; protected FieldType fieldTypeCurrency; protected FieldType fieldTypeAmountRaw; private String exchangeRateProviderClass; private String defaultCurrency; private ExchangeRateProvider provider; public static Logger log = LoggerFactory.getLogger(CurrencyField.class); /** * A wrapper arround <code>Currency.getInstance</code> that returns null * instead of throwing <code>IllegalArgumentException</code> * if the specified Currency does not exist in this JVM. * * @see Currency#getInstance(String) */ public static Currency getCurrency(final String code) { try { return Currency.getInstance(code); } catch (IllegalArgumentException e) { /* :NOOP: */ } return null; } @Override protected void init(IndexSchema schema, Map<String, String> args) { super.init(schema, args); if (this.isMultiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyField types can not be multiValued: " + this.typeName); } this.schema = schema; this.exchangeRateProviderClass = args.get(PARAM_RATE_PROVIDER_CLASS); this.defaultCurrency = args.get(PARAM_DEFAULT_CURRENCY); if (this.defaultCurrency == null) { this.defaultCurrency = DEFAULT_DEFAULT_CURRENCY; } if (this.exchangeRateProviderClass == null) { this.exchangeRateProviderClass = DEFAULT_RATE_PROVIDER_CLASS; } if (null == getCurrency(this.defaultCurrency)) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Default currency code is not supported by this JVM: " + this.defaultCurrency); } String precisionStepString = args.get(PARAM_PRECISION_STEP); if (precisionStepString == null) { precisionStepString = DEFAULT_PRECISION_STEP; } // Initialize field type for amount fieldTypeAmountRaw = new TrieLongField(); fieldTypeAmountRaw.setTypeName("amount_raw_type_tlong"); Map<String,String> map = new HashMap<String,String>(1); map.put("precisionStep", precisionStepString); fieldTypeAmountRaw.init(schema, map); // Initialize field type for currency string fieldTypeCurrency = new StrField(); fieldTypeCurrency.setTypeName("currency_type_string"); fieldTypeCurrency.init(schema, new HashMap<String,String>()); args.remove(PARAM_RATE_PROVIDER_CLASS); args.remove(PARAM_DEFAULT_CURRENCY); args.remove(PARAM_PRECISION_STEP); try { Class<? 
extends ExchangeRateProvider> c = schema.getResourceLoader().findClass(exchangeRateProviderClass, ExchangeRateProvider.class); provider = c.newInstance(); provider.init(args); } catch (Exception e) { throw new SolrException(ErrorCode.BAD_REQUEST, "Error instantiating exhange rate provider "+exchangeRateProviderClass+": " + e.getMessage(), e); } } @Override public boolean isPolyField() { return true; } @Override public void checkSchemaField(final SchemaField field) throws SolrException { super.checkSchemaField(field); if (field.multiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyFields can not be multiValued: " + field.getName()); } } @Override public List<StorableField> createFields(SchemaField field, Object externalVal, float boost) { CurrencyValue value = CurrencyValue.parse(externalVal.toString(), defaultCurrency); List<StorableField> f = new ArrayList<StorableField>(); SchemaField amountField = getAmountField(field); f.add(amountField.createField(String.valueOf(value.getAmount()), amountField.indexed() && !amountField.omitNorms() ? boost : 1F)); SchemaField currencyField = getCurrencyField(field); f.add(currencyField.createField(value.getCurrencyCode(), currencyField.indexed() && !currencyField.omitNorms() ? boost : 1F)); if (field.stored()) { org.apache.lucene.document.FieldType customType = new org.apache.lucene.document.FieldType(); assert !customType.omitNorms(); customType.setStored(true); String storedValue = externalVal.toString().trim(); if (storedValue.indexOf(",") < 0) { storedValue += "," + defaultCurrency; } f.add(createField(field.getName(), storedValue, customType, 1F)); } return f; } private SchemaField getAmountField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); } private SchemaField getCurrencyField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); } private void createDynamicCurrencyField(String suffix, FieldType type) { String name = "*" + POLY_FIELD_SEPARATOR + suffix; Map<String, String> props = new HashMap<String, String>(); props.put("indexed", "true"); props.put("stored", "false"); props.put("multiValued", "false"); props.put("omitNorms", "true"); int p = SchemaField.calcProps(name, type, props); schema.registerDynamicField(SchemaField.create(name, type, p, null)); } /** * When index schema is informed, add dynamic fields. * * @param indexSchema The index schema. */ @Override public void inform(IndexSchema indexSchema) { createDynamicCurrencyField(FIELD_SUFFIX_CURRENCY, fieldTypeCurrency); createDynamicCurrencyField(FIELD_SUFFIX_AMOUNT_RAW, fieldTypeAmountRaw); } /** * Load the currency config when resource loader initialized. * * @param resourceLoader The resource loader. 
*/ @Override public void inform(ResourceLoader resourceLoader) { provider.inform(resourceLoader); boolean reloaded = provider.reload(); if(!reloaded) { log.warn("Failed reloading currencies"); } } @Override public Query getFieldQuery(QParser parser, SchemaField field, String externalVal) { CurrencyValue value = CurrencyValue.parse(externalVal, defaultCurrency); CurrencyValue valueDefault; valueDefault = value.convertTo(provider, defaultCurrency); return getRangeQuery(parser, field, valueDefault, valueDefault, true, true); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the indexed value as converted to the default * currency for the field, normalized to its most granular form based * on the default fractional digits. * </p> * <p> * For example: If the default Currency specified for a field is * <code>USD</code>, then the values returned by this value source would * represent the equivalent number of "cents" (ie: value in dollars * 100) * after converting each document's native currency to USD -- because the * default fractional digits for <code>USD</code> is "<code>2</code>". * So for a document whose indexed value was currently equivalent to * "<code>5.43,USD</code>" using the exchange provider for this field, * this ValueSource would return a value of "<code>543</code>" * </p> * * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see Currency#getDefaultFractionDigits * @see #getConvertedValueSource */ public RawCurrencyValueSource getValueSource(SchemaField field, QParser parser) { field.checkFieldCacheSource(parser); return new RawCurrencyValueSource(field, defaultCurrency, parser); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the value from the underlying * <code>RawCurrencyValueSource</code> as converted to the specified target * Currency. * </p> * <p> * For example: If the <code>targetCurrencyCode</code> param is set to * <code>USD</code>, then the values returned by this value source would * represent the equivalent number of dollars after converting each * document's raw value to <code>USD</code>. 
So for a document whose * indexed value was currently equivalent to "<code>5.43,USD</code>" * using the exchange provider for this field, this ValueSource would * return a value of "<code>5.43</code>" * </p> * * @param targetCurrencyCode The target currency for the resulting value source, if null the defaultCurrency for this field type will be used * @param source the raw ValueSource to wrap * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see #getValueSource */ public ValueSource getConvertedValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { if (null == targetCurrencyCode) { targetCurrencyCode = defaultCurrency; } return new ConvertedCurrencyValueSource(targetCurrencyCode, source); } @Override public Query getRangeQuery(QParser parser, SchemaField field, String part1, String part2, final boolean minInclusive, final boolean maxInclusive) { final CurrencyValue p1 = CurrencyValue.parse(part1, defaultCurrency); final CurrencyValue p2 = CurrencyValue.parse(part2, defaultCurrency); if (p1 != null && p2 != null && !p1.getCurrencyCode().equals(p2.getCurrencyCode())) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot parse range query " + part1 + " to " + part2 + ": range queries only supported when upper and lower bound have same currency."); } return getRangeQuery(parser, field, p1, p2, minInclusive, maxInclusive); } public Query getRangeQuery(QParser parser, SchemaField field, final CurrencyValue p1, final CurrencyValue p2, final boolean minInclusive, final boolean maxInclusive) { String currencyCode = (p1 != null) ? p1.getCurrencyCode() : (p2 != null) ? p2.getCurrencyCode() : defaultCurrency; // ValueSourceRangeFilter doesn't check exists(), so we have to final Filter docsWithValues = new FieldValueFilter(getAmountField(field).getName()); final Filter vsRangeFilter = new ValueSourceRangeFilter (new RawCurrencyValueSource(field, currencyCode, parser), p1 == null ? null : p1.getAmount() + "", p2 == null ? null : p2.getAmount() + "", minInclusive, maxInclusive); final Filter docsInRange = new ChainedFilter (new Filter [] { docsWithValues, vsRangeFilter }, ChainedFilter.AND); return new SolrConstantScoreQuery(docsInRange); } @Override public SortField getSortField(SchemaField field, boolean reverse) { // Convert all values to default currency for sorting. return (new RawCurrencyValueSource(field, defaultCurrency, null)).getSortField(reverse); } @Override public void write(TextResponseWriter writer, String name, StorableField field) throws IOException { writer.writeStr(name, field.stringValue(), true); } public ExchangeRateProvider getProvider() { return provider; } /** * <p> * A value source whose values represent the "normal" values * in the specified target currency. 
* </p> * @see RawCurrencyValueSource */ class ConvertedCurrencyValueSource extends ValueSource { private final Currency targetCurrency; private final RawCurrencyValueSource source; private final double rate; public ConvertedCurrencyValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { this.source = source; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } // the target digits & currency of our source, // become the source digits & currency of ourselves this.rate = provider.getExchangeRate (source.getTargetCurrency().getCurrencyCode(), targetCurrency.getCurrencyCode()); } @Override public FunctionValues getValues(Map context, AtomicReaderContext reader) throws IOException { final FunctionValues amounts = source.getValues(context, reader); // the target digits & currency of our source, // become the source digits & currency of ourselves final String sourceCurrencyCode = source.getTargetCurrency().getCurrencyCode(); final int sourceFractionDigits = source.getTargetCurrency().getDefaultFractionDigits(); final double divisor = Math.pow(10D, targetCurrency.getDefaultFractionDigits()); return new FunctionValues() { @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { return (long) doubleVal(doc); } @Override public int intVal(int doc) { return (int) doubleVal(doc); } @Override public double doubleVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / divisor; } @Override public float floatVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / ((float)divisor); } @Override public String strVal(int doc) { return Double.toString(doubleVal(doc)); } @Override public String toString(int doc) { return name() + '(' + strVal(doc) + ')'; } }; } public String name() { return "currency"; } @Override public String description() { return name() + "(" + source.getField().getName() + "," + targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConvertedCurrencyValueSource that = (ConvertedCurrencyValueSource) o; return !(source != null ? !source.equals(that.source) : that.source != null) && (rate == that.rate) && !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null); } @Override public int hashCode() { int result = targetCurrency != null ? targetCurrency.hashCode() : 0; result = 31 * result + (source != null ? source.hashCode() : 0); result = 31 * (int) Double.doubleToLongBits(rate); return result; } } /** * <p> * A value source whose values represent the "raw" (ie: normalized using * the number of default fractional digits) values in the specified * target currency). 
* </p> * <p> * For example: if the specified target currency is "<code>USD</code>" * then the numeric values are the number of pennies in the value * (ie: <code>$n * 100</code>) since the number of defalt fractional * digits for <code>USD</code> is "<code>2</code>") * </p> * @see ConvertedCurrencyValueSource */ class RawCurrencyValueSource extends ValueSource { private static final long serialVersionUID = 1L; private final Currency targetCurrency; private ValueSource currencyValues; private ValueSource amountValues; private final SchemaField sf; public RawCurrencyValueSource(SchemaField sfield, String targetCurrencyCode, QParser parser) { this.sf = sfield; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } SchemaField amountField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); SchemaField currencyField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); currencyValues = currencyField.getType().getValueSource(currencyField, parser); amountValues = amountField.getType().getValueSource(amountField, parser); } public SchemaField getField() { return sf; } public Currency getTargetCurrency() { return targetCurrency; } @Override public FunctionValues getValues(Map context, AtomicReaderContext reader) throws IOException { final FunctionValues amounts = amountValues.getValues(context, reader); final FunctionValues currencies = currencyValues.getValues(context, reader); return new FunctionValues() { private final int MAX_CURRENCIES_TO_CACHE = 256; private final int[] fractionDigitCache = new int[MAX_CURRENCIES_TO_CACHE]; private final String[] currencyOrdToCurrencyCache = new String[MAX_CURRENCIES_TO_CACHE]; private final double[] exchangeRateCache = new double[MAX_CURRENCIES_TO_CACHE]; private int targetFractionDigits = -1; private int targetCurrencyOrd = -1; private boolean initializedCache; private String getDocCurrencyCode(int doc, int currencyOrd) { if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { String currency = currencyOrdToCurrencyCache[currencyOrd]; if (currency == null) { currencyOrdToCurrencyCache[currencyOrd] = currency = currencies.strVal(doc); } if (currency == null) { currency = defaultCurrency; } if (targetCurrencyOrd == -1 && currency.equals(targetCurrency.getCurrencyCode() )) { targetCurrencyOrd = currencyOrd; } return currency; } else { return currencies.strVal(doc); } } /** throws a (Server Error) SolrException if the code is not valid */ private Currency getDocCurrency(int doc, int currencyOrd) { String code = getDocCurrencyCode(doc, currencyOrd); Currency c = getCurrency(code); if (null == c) { throw new SolrException (SolrException.ErrorCode.SERVER_ERROR, "Currency code of document is not supported by this JVM: "+code); } return c; } @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { long amount = amounts.longVal(doc); // bail fast using whatever ammounts defaults to if no value // (if we don't do this early, currencyOrd may be < 0, // causing index bounds exception if ( ! 
exists(doc) ) { return amount; } if (!initializedCache) { for (int i = 0; i < fractionDigitCache.length; i++) { fractionDigitCache[i] = -1; } initializedCache = true; } int currencyOrd = currencies.ordVal(doc); if (currencyOrd == targetCurrencyOrd) { return amount; } double exchangeRate; int sourceFractionDigits; if (targetFractionDigits == -1) { targetFractionDigits = targetCurrency.getDefaultFractionDigits(); } if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { exchangeRate = exchangeRateCache[currencyOrd]; if (exchangeRate <= 0.0) { String sourceCurrencyCode = getDocCurrencyCode(doc, currencyOrd); exchangeRate = exchangeRateCache[currencyOrd] = provider.getExchangeRate(sourceCurrencyCode, targetCurrency.getCurrencyCode()); } sourceFractionDigits = fractionDigitCache[currencyOrd]; if (sourceFractionDigits == -1) { sourceFractionDigits = fractionDigitCache[currencyOrd] = getDocCurrency(doc, currencyOrd).getDefaultFractionDigits(); } } else { Currency source = getDocCurrency(doc, currencyOrd); exchangeRate = provider.getExchangeRate(source.getCurrencyCode(), targetCurrency.getCurrencyCode()); sourceFractionDigits = source.getDefaultFractionDigits(); } return CurrencyValue.convertAmount(exchangeRate, sourceFractionDigits, amount, targetFractionDigits); } @Override public int intVal(int doc) { return (int) longVal(doc); } @Override public double doubleVal(int doc) { return (double) longVal(doc); } @Override public float floatVal(int doc) { return (float) longVal(doc); } @Override public String strVal(int doc) { return Long.toString(longVal(doc)); } @Override public String toString(int doc) { return name() + '(' + amounts.toString(doc) + ',' + currencies.toString(doc) + ')'; } }; } public String name() { return "rawcurrency"; } @Override public String description() { return name() + "(" + sf.getName() + ",target="+targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RawCurrencyValueSource that = (RawCurrencyValueSource) o; return !(amountValues != null ? !amountValues.equals(that.amountValues) : that.amountValues != null) && !(currencyValues != null ? !currencyValues.equals(that.currencyValues) : that.currencyValues != null) && !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null); } @Override public int hashCode() { int result = targetCurrency != null ? targetCurrency.hashCode() : 0; result = 31 * result + (currencyValues != null ? currencyValues.hashCode() : 0); result = 31 * result + (amountValues != null ? amountValues.hashCode() : 0); return result; } } } /** * Configuration for currency. Provides currency exchange rates. */ class FileExchangeRateProvider implements ExchangeRateProvider { public static Logger log = LoggerFactory.getLogger(FileExchangeRateProvider.class); protected static final String PARAM_CURRENCY_CONFIG = "currencyConfig"; // Exchange rate map, maps Currency Code -> Currency Code -> Rate private Map<String, Map<String, Double>> rates = new HashMap<String, Map<String, Double>>(); private String currencyConfigFile; private ResourceLoader loader; /** * Returns the currently known exchange rate between two currencies. If a direct rate has been loaded, * it is used. Otherwise, if a rate is known to convert the target currency to the source, the inverse * exchange rate is computed. * * @param sourceCurrencyCode The source currency being converted from. 
* @param targetCurrencyCode The target currency being converted to. * @return The exchange rate. * @throws SolrException if the requested currency pair cannot be found */ @Override public double getExchangeRate(String sourceCurrencyCode, String targetCurrencyCode) { if (sourceCurrencyCode == null || targetCurrencyCode == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot get exchange rate; currency was null."); } if (sourceCurrencyCode.equals(targetCurrencyCode)) { return 1.0; } Double directRate = lookupRate(sourceCurrencyCode, targetCurrencyCode); if (directRate != null) { return directRate; } Double symmetricRate = lookupRate(targetCurrencyCode, sourceCurrencyCode); if (symmetricRate != null) { return 1.0 / symmetricRate; } throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No available conversion rate between " + sourceCurrencyCode + " to " + targetCurrencyCode); } /** * Looks up the current known rate, if any, between the source and target currencies. * * @param sourceCurrencyCode The source currency being converted from. * @param targetCurrencyCode The target currency being converted to. * @return The exchange rate, or null if no rate has been registered. */ private Double lookupRate(String sourceCurrencyCode, String targetCurrencyCode) { Map<String, Double> rhs = rates.get(sourceCurrencyCode); if (rhs != null) { return rhs.get(targetCurrencyCode); } return null; } /** * Registers the specified exchange rate. * * @param ratesMap The map to add rate to * @param sourceCurrencyCode The source currency. * @param targetCurrencyCode The target currency. * @param rate The known exchange rate. */ private void addRate(Map<String, Map<String, Double>> ratesMap, String sourceCurrencyCode, String targetCurrencyCode, double rate) { Map<String, Double> rhs = ratesMap.get(sourceCurrencyCode); if (rhs == null) { rhs = new HashMap<String, Double>(); ratesMap.put(sourceCurrencyCode, rhs); } rhs.put(targetCurrencyCode, rate); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; FileExchangeRateProvider that = (FileExchangeRateProvider) o; return !(rates != null ? !rates.equals(that.rates) : that.rates != null); } @Override public int hashCode() { return rates != null ? rates.hashCode() : 0; } @Override public String toString() { return "["+this.getClass().getName()+" : " + rates.size() + " rates.]"; } @Override public Set<String> listAvailableCurrencies() { Set<String> currencies = new HashSet<String>(); for(String from : rates.keySet()) { currencies.add(from); for(String to : rates.get(from).keySet()) { currencies.add(to); } } return currencies; } @Override public boolean reload() throws SolrException { InputStream is = null; Map<String, Map<String, Double>> tmpRates = new HashMap<String, Map<String, Double>>(); try { log.info("Reloading exchange rates from file "+this.currencyConfigFile); is = loader.openResource(currencyConfigFile); javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); try { dbf.setXIncludeAware(true); dbf.setNamespaceAware(true); } catch (UnsupportedOperationException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "XML parser doesn't support XInclude option", e); } try { Document doc = dbf.newDocumentBuilder().parse(is); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); // Parse exchange rates. 
NodeList nodes = (NodeList) xpath.evaluate("/currencyConfig/rates/rate", doc, XPathConstants.NODESET); for (int i = 0; i < nodes.getLength(); i++) { Node rateNode = nodes.item(i); NamedNodeMap attributes = rateNode.getAttributes(); Node from = attributes.getNamedItem("from"); Node to = attributes.getNamedItem("to"); Node rate = attributes.getNamedItem("rate"); if (from == null || to == null || rate == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exchange rate missing attributes (required: from, to, rate) " + rateNode); } String fromCurrency = from.getNodeValue(); String toCurrency = to.getNodeValue(); Double exchangeRate; if (null == CurrencyField.getCurrency(fromCurrency)) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'from' currency not supported in this JVM: " + fromCurrency); } if (null == CurrencyField.getCurrency(toCurrency)) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'to' currency not supported in this JVM: " + toCurrency); } try { exchangeRate = Double.parseDouble(rate.getNodeValue()); } catch (NumberFormatException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not parse exchange rate: " + rateNode, e); } addRate(tmpRates, fromCurrency, toCurrency, exchangeRate); } } catch (SAXException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (ParserConfigurationException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (XPathExpressionException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } } catch (IOException e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Error while opening Currency configuration file "+currencyConfigFile, e); } finally { try { if (is != null) { is.close(); } } catch (IOException e) { e.printStackTrace(); } } // Atomically swap in the new rates map, if it loaded successfully this.rates = tmpRates; return true; } @Override public void init(Map<String,String> params) throws SolrException { this.currencyConfigFile = params.get(PARAM_CURRENCY_CONFIG); if(currencyConfigFile == null) { throw new SolrException(ErrorCode.NOT_FOUND, "Missing required configuration "+PARAM_CURRENCY_CONFIG); } // Removing config params custom to us params.remove(PARAM_CURRENCY_CONFIG); } @Override public void inform(ResourceLoader loader) throws SolrException { if(loader == null) { throw new SolrException(ErrorCode.SERVER_ERROR, "Needs ResourceLoader in order to load config file"); } this.loader = loader; reload(); } } /** * Represents a Currency field value, which includes a long amount and ISO currency code. */ class CurrencyValue { private long amount; private String currencyCode; /** * Constructs a new currency value. * * @param amount The amount. * @param currencyCode The currency code. */ public CurrencyValue(long amount, String currencyCode) { this.amount = amount; this.currencyCode = currencyCode; } /** * Constructs a new currency value by parsing the specific input. * <p/> * Currency values are expected to be in the format &lt;amount&gt;,&lt;currency code&gt;, * for example, "500,USD" would represent 5 U.S. Dollars. * <p/> * If no currency code is specified, the default is assumed. * * @param externalVal The value to parse. 
* @param defaultCurrency The default currency. * @return The parsed CurrencyValue. */ public static CurrencyValue parse(String externalVal, String defaultCurrency) { if (externalVal == null) { return null; } String amount = externalVal; String code = defaultCurrency; if (externalVal.contains(",")) { String[] amountAndCode = externalVal.split(","); amount = amountAndCode[0]; code = amountAndCode[1]; } if (amount.equals("*")) { return null; } Currency currency = CurrencyField.getCurrency(code); if (currency == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + code); } try { double value = Double.parseDouble(amount); long currencyValue = Math.round(value * Math.pow(10.0, currency.getDefaultFractionDigits())); return new CurrencyValue(currencyValue, code); } catch (NumberFormatException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } } /** * The amount of the CurrencyValue. * * @return The amount. */ public long getAmount() { return amount; } /** * The ISO currency code of the CurrencyValue. * * @return The currency code. */ public String getCurrencyCode() { return currencyCode; } /** * Performs a currency conversion & unit conversion. * * @param exchangeRates Exchange rates to apply. * @param sourceCurrencyCode The source currency code. * @param sourceAmount The source amount. * @param targetCurrencyCode The target currency code. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. */ public static long convertAmount(ExchangeRateProvider exchangeRates, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) { double exchangeRate = exchangeRates.getExchangeRate(sourceCurrencyCode, targetCurrencyCode); return convertAmount(exchangeRate, sourceCurrencyCode, sourceAmount, targetCurrencyCode); } /** * Performs a currency conversion & unit conversion. * * @param exchangeRate Exchange rate to apply. * @param sourceFractionDigits The fraction digits of the source. * @param sourceAmount The source amount. * @param targetFractionDigits The fraction digits of the target. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. */ public static long convertAmount(final double exchangeRate, final int sourceFractionDigits, final long sourceAmount, final int targetFractionDigits) { int digitDelta = targetFractionDigits - sourceFractionDigits; double value = ((double) sourceAmount * exchangeRate); if (digitDelta != 0) { if (digitDelta < 0) { for (int i = 0; i < -digitDelta; i++) { value *= 0.1; } } else { for (int i = 0; i < digitDelta; i++) { value *= 10.0; } } } return (long) value; } /** * Performs a currency conversion & unit conversion. * * @param exchangeRate Exchange rate to apply. * @param sourceCurrencyCode The source currency code. * @param sourceAmount The source amount. * @param targetCurrencyCode The target currency code. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. 
*/ public static long convertAmount(double exchangeRate, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) { if (targetCurrencyCode.equals(sourceCurrencyCode)) { return sourceAmount; } int sourceFractionDigits = Currency.getInstance(sourceCurrencyCode).getDefaultFractionDigits(); Currency targetCurrency = Currency.getInstance(targetCurrencyCode); int targetFractionDigits = targetCurrency.getDefaultFractionDigits(); return convertAmount(exchangeRate, sourceFractionDigits, sourceAmount, targetFractionDigits); } /** * Returns a new CurrencyValue that is the conversion of this CurrencyValue to the specified currency. * * @param exchangeRates The exchange rate provider. * @param targetCurrencyCode The target currency code to convert this CurrencyValue to. * @return The converted CurrencyValue. */ public CurrencyValue convertTo(ExchangeRateProvider exchangeRates, String targetCurrencyCode) { return new CurrencyValue(convertAmount(exchangeRates, this.getCurrencyCode(), this.getAmount(), targetCurrencyCode), targetCurrencyCode); } @Override public String toString() { return String.valueOf(amount) + "," + currencyCode; } }
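To make the javadoc arithmetic above concrete, here is a minimal, hedged sketch that exercises the public static helpers of CurrencyValue defined in the file above. It assumes the classes above are compiled and on the classpath; the class name CurrencyValueDemo, the 0.78 EUR-per-USD rate, and the placement in the org.apache.solr.schema package (needed because CurrencyValue is package-private) are illustrative assumptions, not part of the original source.

package org.apache.solr.schema;

// Minimal sketch: how "amount,code" input is normalized to raw fractional units,
// and how ConvertedCurrencyValueSource-style output is derived from the raw value.
public class CurrencyValueDemo {
    public static void main(String[] args) {
        // "5.43,USD" -> 543 raw units, because USD has 2 default fraction digits.
        CurrencyValue v = CurrencyValue.parse("5.43,USD", "USD");
        System.out.println(v.getAmount());       // 543 (what RawCurrencyValueSource exposes)
        System.out.println(v.getCurrencyCode()); // USD

        // ConvertedCurrencyValueSource.doubleVal() effectively does this: convert the
        // raw amount with the exchange rate, then divide by 10^(target fraction digits).
        double rate = 0.78;                                                            // assumed EUR-per-USD rate
        long rawEur = CurrencyValue.convertAmount(rate, "USD", v.getAmount(), "EUR");  // 423
        double converted = rawEur / Math.pow(10, 2);                                   // 4.23
        System.out.println(converted);
    }
}

Note that convertAmount truncates to a long, so 543 * 0.78 = 423.54 becomes 423 raw EUR units (4.23 EUR) rather than being rounded.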
solr/core/src/java/org/apache/solr/schema/CurrencyField.java
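Staying with the file named above: the four-argument CurrencyValue.convertAmount overload applies the exchange rate first and then shifts the result by the difference in default fraction digits between the two currencies. The sketch below is a hedged worked example; the 94 JPY-per-USD rate is a made-up figure used only to show the digit shift (USD has 2 fraction digits, JPY has 0).

package org.apache.solr.schema;

// Sketch of the fraction-digit shift performed by
// convertAmount(exchangeRate, sourceFractionDigits, sourceAmount, targetFractionDigits).
public class ConvertAmountDemo {
    public static void main(String[] args) {
        // 543 raw USD units (= 5.43 USD) at an assumed 94 JPY per USD:
        // 543 * 94.0 = 51042.0, then scaled by 10^(0 - 2) -> 510.42 -> truncated to 510,
        // i.e. 510 raw JPY units (JPY carries no fractional part).
        long rawJpy = CurrencyValue.convertAmount(94.0, 2, 543L, 0);
        System.out.println(rawJpy); // 510
    }
}

The overload taking currency codes, used in the previous sketch, simply looks up these digit counts via java.util.Currency before delegating to this method.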
package org.apache.solr.schema; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.analysis.util.ResourceLoader; import org.apache.lucene.analysis.util.ResourceLoaderAware; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.StorableField; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Filter; import org.apache.lucene.search.FieldValueFilter; import org.apache.lucene.queries.ChainedFilter; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.response.TextResponseWriter; import org.apache.solr.search.QParser; import org.apache.solr.search.SolrConstantScoreQuery; import org.apache.solr.search.function.ValueSourceRangeFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Currency; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Field type for support of monetary values. 
* <p> * See <a href="http://wiki.apache.org/solr/CurrencyField">http://wiki.apache.org/solr/CurrencyField</a> */ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoaderAware { protected static final String PARAM_DEFAULT_CURRENCY = "defaultCurrency"; protected static final String PARAM_RATE_PROVIDER_CLASS = "providerClass"; protected static final Object PARAM_PRECISION_STEP = "precisionStep"; protected static final String DEFAULT_RATE_PROVIDER_CLASS = "solr.FileExchangeRateProvider"; protected static final String DEFAULT_DEFAULT_CURRENCY = "USD"; protected static final String DEFAULT_PRECISION_STEP = "0"; protected static final String FIELD_SUFFIX_AMOUNT_RAW = "_amount_raw"; protected static final String FIELD_SUFFIX_CURRENCY = "_currency"; private IndexSchema schema; protected FieldType fieldTypeCurrency; protected FieldType fieldTypeAmountRaw; private String exchangeRateProviderClass; private String defaultCurrency; private ExchangeRateProvider provider; public static Logger log = LoggerFactory.getLogger(CurrencyField.class); /** * A wrapper arround <code>Currency.getInstance</code> that returns null * instead of throwing <code>IllegalArgumentException</code> * if the specified Currency does not exist in this JVM. * * @see Currency#getInstance(String) */ public static Currency getCurrency(final String code) { try { return Currency.getInstance(code); } catch (IllegalArgumentException e) { /* :NOOP: */ } return null; } @Override protected void init(IndexSchema schema, Map<String, String> args) { super.init(schema, args); if (this.isMultiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyField types can not be multiValued: " + this.typeName); } this.schema = schema; this.exchangeRateProviderClass = args.get(PARAM_RATE_PROVIDER_CLASS); this.defaultCurrency = args.get(PARAM_DEFAULT_CURRENCY); if (this.defaultCurrency == null) { this.defaultCurrency = DEFAULT_DEFAULT_CURRENCY; } if (this.exchangeRateProviderClass == null) { this.exchangeRateProviderClass = DEFAULT_RATE_PROVIDER_CLASS; } if (null == getCurrency(this.defaultCurrency)) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Default currency code is not supported by this JVM: " + this.defaultCurrency); } String precisionStepString = args.get(PARAM_PRECISION_STEP); if (precisionStepString == null) { precisionStepString = DEFAULT_PRECISION_STEP; } // Initialize field type for amount fieldTypeAmountRaw = new TrieLongField(); fieldTypeAmountRaw.setTypeName("amount_raw_type_tlong"); Map<String,String> map = new HashMap<String,String>(1); map.put("precisionStep", precisionStepString); fieldTypeAmountRaw.init(schema, map); // Initialize field type for currency string fieldTypeCurrency = new StrField(); fieldTypeCurrency.setTypeName("currency_type_string"); fieldTypeCurrency.init(schema, new HashMap<String,String>()); args.remove(PARAM_RATE_PROVIDER_CLASS); args.remove(PARAM_DEFAULT_CURRENCY); args.remove(PARAM_PRECISION_STEP); try { Class<? 
extends ExchangeRateProvider> c = schema.getResourceLoader().findClass(exchangeRateProviderClass, ExchangeRateProvider.class); provider = c.newInstance(); provider.init(args); } catch (Exception e) { throw new SolrException(ErrorCode.BAD_REQUEST, "Error instantiating exhange rate provider "+exchangeRateProviderClass+": " + e.getMessage(), e); } } @Override public boolean isPolyField() { return true; } @Override public void checkSchemaField(final SchemaField field) throws SolrException { super.checkSchemaField(field); if (field.multiValued()) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "CurrencyFields can not be multiValued: " + field.getName()); } } @Override public List<StorableField> createFields(SchemaField field, Object externalVal, float boost) { CurrencyValue value = CurrencyValue.parse(externalVal.toString(), defaultCurrency); List<StorableField> f = new ArrayList<StorableField>(); SchemaField amountField = getAmountField(field); f.add(amountField.createField(String.valueOf(value.getAmount()), amountField.indexed() && !amountField.omitNorms() ? boost : 1F)); SchemaField currencyField = getCurrencyField(field); f.add(currencyField.createField(value.getCurrencyCode(), currencyField.indexed() && !currencyField.omitNorms() ? boost : 1F)); if (field.stored()) { org.apache.lucene.document.FieldType customType = new org.apache.lucene.document.FieldType(); assert !customType.omitNorms(); customType.setStored(true); String storedValue = externalVal.toString().trim(); if (storedValue.indexOf(",") < 0) { storedValue += "," + defaultCurrency; } f.add(createField(field.getName(), storedValue, customType, 1F)); } return f; } private SchemaField getAmountField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); } private SchemaField getCurrencyField(SchemaField field) { return schema.getField(field.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); } private void createDynamicCurrencyField(String suffix, FieldType type) { String name = "*" + POLY_FIELD_SEPARATOR + suffix; Map<String, String> props = new HashMap<String, String>(); props.put("indexed", "true"); props.put("stored", "false"); props.put("multiValued", "false"); props.put("omitNorms", "true"); int p = SchemaField.calcProps(name, type, props); schema.registerDynamicField(SchemaField.create(name, type, p, null)); } /** * When index schema is informed, add dynamic fields. * * @param indexSchema The index schema. */ @Override public void inform(IndexSchema indexSchema) { createDynamicCurrencyField(FIELD_SUFFIX_CURRENCY, fieldTypeCurrency); createDynamicCurrencyField(FIELD_SUFFIX_AMOUNT_RAW, fieldTypeAmountRaw); } /** * Load the currency config when resource loader initialized. * * @param resourceLoader The resource loader. 
*/ @Override public void inform(ResourceLoader resourceLoader) { provider.inform(resourceLoader); boolean reloaded = provider.reload(); if(!reloaded) { log.warn("Failed reloading currencies"); } } @Override public Query getFieldQuery(QParser parser, SchemaField field, String externalVal) { CurrencyValue value = CurrencyValue.parse(externalVal, defaultCurrency); CurrencyValue valueDefault; valueDefault = value.convertTo(provider, defaultCurrency); return getRangeQuery(parser, field, valueDefault, valueDefault, true, true); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the indexed value as converted to the default * currency for the field, normalized to it's most granular form based * on the default fractional digits. * </p> * <p> * For example: If the default Currency specified for a field is * <code>USD</code>, then the values returned by this value source would * represent the equivilent number of "cents" (ie: value in dollars * 100) * after converting each document's native currency to USD -- because the * default fractional digits for <code>USD</code> is "<code>2</code>". * So for a document whose indexed value was currently equivilent to * "<code>5.43,USD</code>" using the the exchange provider for this field, * this ValueSource would return a value of "<code>543<code>" * </p> * * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see Currency#getDefaultFractionDigits * @see getConvertedValueSource */ public RawCurrencyValueSource getValueSource(SchemaField field, QParser parser) { field.checkFieldCacheSource(parser); return new RawCurrencyValueSource(field, defaultCurrency, parser); } /** * <p> * Returns a ValueSource over this field in which the numeric value for * each document represents the value from the underlying * <code>RawCurrencyValueSource</code> as converted to the specified target * Currency. * </p> * <p> * For example: If the <code>targetCurrencyCode</code> param is set to * <code>USD</code>, then the values returned by this value source would * represent the equivilent number of dollars after converting each * document's raw value to <code>USD</code>. 
So for a document whose * indexed value was currently equivilent to "<code>5.43,USD</code>" * using the the exchange provider for this field, this ValueSource would * return a value of "<code>5.43<code>" * </p> * * @param targetCurrencyCode The target currency for the resulting value source, if null the defaultCurrency for this field type will be used * @param source the raw ValueSource to wrap * @see #PARAM_DEFAULT_CURRENCY * @see #DEFAULT_DEFAULT_CURRENCY * @see getValueSource */ public ValueSource getConvertedValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { if (null == targetCurrencyCode) { targetCurrencyCode = defaultCurrency; } return new ConvertedCurrencyValueSource(targetCurrencyCode, source); } @Override public Query getRangeQuery(QParser parser, SchemaField field, String part1, String part2, final boolean minInclusive, final boolean maxInclusive) { final CurrencyValue p1 = CurrencyValue.parse(part1, defaultCurrency); final CurrencyValue p2 = CurrencyValue.parse(part2, defaultCurrency); if (p1 != null && p2 != null && !p1.getCurrencyCode().equals(p2.getCurrencyCode())) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot parse range query " + part1 + " to " + part2 + ": range queries only supported when upper and lower bound have same currency."); } return getRangeQuery(parser, field, p1, p2, minInclusive, maxInclusive); } public Query getRangeQuery(QParser parser, SchemaField field, final CurrencyValue p1, final CurrencyValue p2, final boolean minInclusive, final boolean maxInclusive) { String currencyCode = (p1 != null) ? p1.getCurrencyCode() : (p2 != null) ? p2.getCurrencyCode() : defaultCurrency; // ValueSourceRangeFilter doesn't check exists(), so we have to final Filter docsWithValues = new FieldValueFilter(getAmountField(field).getName()); final Filter vsRangeFilter = new ValueSourceRangeFilter (new RawCurrencyValueSource(field, currencyCode, parser), p1 == null ? null : p1.getAmount() + "", p2 == null ? null : p2.getAmount() + "", minInclusive, maxInclusive); final Filter docsInRange = new ChainedFilter (new Filter [] { docsWithValues, vsRangeFilter }, ChainedFilter.AND); return new SolrConstantScoreQuery(docsInRange); } @Override public SortField getSortField(SchemaField field, boolean reverse) { // Convert all values to default currency for sorting. return (new RawCurrencyValueSource(field, defaultCurrency, null)).getSortField(reverse); } @Override public void write(TextResponseWriter writer, String name, StorableField field) throws IOException { writer.writeStr(name, field.stringValue(), true); } public ExchangeRateProvider getProvider() { return provider; } /** * <p> * A value source whose values represent the "normal" values * in the specified target currency. 
* </p> * @see RawCurrencyValueSource */ class ConvertedCurrencyValueSource extends ValueSource { private final Currency targetCurrency; private final RawCurrencyValueSource source; private final double rate; public ConvertedCurrencyValueSource(String targetCurrencyCode, RawCurrencyValueSource source) { this.source = source; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } // the target digits & currency of our source, // become the source digits & currency of ourselves this.rate = provider.getExchangeRate (source.getTargetCurrency().getCurrencyCode(), targetCurrency.getCurrencyCode()); } @Override public FunctionValues getValues(Map context, AtomicReaderContext reader) throws IOException { final FunctionValues amounts = source.getValues(context, reader); // the target digits & currency of our source, // become the source digits & currency of ourselves final String sourceCurrencyCode = source.getTargetCurrency().getCurrencyCode(); final int sourceFractionDigits = source.getTargetCurrency().getDefaultFractionDigits(); final double divisor = Math.pow(10D, targetCurrency.getDefaultFractionDigits()); return new FunctionValues() { @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { return (long) doubleVal(doc); } @Override public int intVal(int doc) { return (int) doubleVal(doc); } @Override public double doubleVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / divisor; } @Override public float floatVal(int doc) { return CurrencyValue.convertAmount(rate, sourceCurrencyCode, amounts.longVal(doc), targetCurrency.getCurrencyCode()) / ((float)divisor); } @Override public String strVal(int doc) { return Double.toString(doubleVal(doc)); } @Override public String toString(int doc) { return name() + '(' + strVal(doc) + ')'; } }; } public String name() { return "currency"; } @Override public String description() { return name() + "(" + source.getField().getName() + "," + targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ConvertedCurrencyValueSource that = (ConvertedCurrencyValueSource) o; return !(source != null ? !source.equals(that.source) : that.source != null) && (rate == that.rate) && !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null); } @Override public int hashCode() { int result = targetCurrency != null ? targetCurrency.hashCode() : 0; result = 31 * result + (source != null ? source.hashCode() : 0); result = 31 * (int) Double.doubleToLongBits(rate); return result; } } /** * <p> * A value source whose values represent the "raw" (ie: normalized using * the number of default fractional digits) values in the specified * target currency). 
* </p> * <p> * For example: if the specified target currency is "<code>USD</code>" * then the numeric values are the number of pennies in the value * (ie: <code>$n * 100</code>) since the number of defalt fractional * digits for <code>USD</code> is "<code>2</code>") * </p> * @see ConvertedCurrencValueSource */ class RawCurrencyValueSource extends ValueSource { private static final long serialVersionUID = 1L; private final Currency targetCurrency; private ValueSource currencyValues; private ValueSource amountValues; private final SchemaField sf; public RawCurrencyValueSource(SchemaField sfield, String targetCurrencyCode, QParser parser) { this.sf = sfield; this.targetCurrency = getCurrency(targetCurrencyCode); if (null == targetCurrency) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + targetCurrencyCode); } SchemaField amountField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_AMOUNT_RAW); SchemaField currencyField = schema.getField(sf.getName() + POLY_FIELD_SEPARATOR + FIELD_SUFFIX_CURRENCY); currencyValues = currencyField.getType().getValueSource(currencyField, parser); amountValues = amountField.getType().getValueSource(amountField, parser); } public SchemaField getField() { return sf; } public Currency getTargetCurrency() { return targetCurrency; } @Override public FunctionValues getValues(Map context, AtomicReaderContext reader) throws IOException { final FunctionValues amounts = amountValues.getValues(context, reader); final FunctionValues currencies = currencyValues.getValues(context, reader); return new FunctionValues() { private final int MAX_CURRENCIES_TO_CACHE = 256; private final int[] fractionDigitCache = new int[MAX_CURRENCIES_TO_CACHE]; private final String[] currencyOrdToCurrencyCache = new String[MAX_CURRENCIES_TO_CACHE]; private final double[] exchangeRateCache = new double[MAX_CURRENCIES_TO_CACHE]; private int targetFractionDigits = -1; private int targetCurrencyOrd = -1; private boolean initializedCache; private String getDocCurrencyCode(int doc, int currencyOrd) { if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { String currency = currencyOrdToCurrencyCache[currencyOrd]; if (currency == null) { currencyOrdToCurrencyCache[currencyOrd] = currency = currencies.strVal(doc); } if (currency == null) { currency = defaultCurrency; } if (targetCurrencyOrd == -1 && currency.equals(targetCurrency.getCurrencyCode() )) { targetCurrencyOrd = currencyOrd; } return currency; } else { return currencies.strVal(doc); } } /** throws a (Server Error) SolrException if the code is not valid */ private Currency getDocCurrency(int doc, int currencyOrd) { String code = getDocCurrencyCode(doc, currencyOrd); Currency c = getCurrency(code); if (null == c) { throw new SolrException (SolrException.ErrorCode.SERVER_ERROR, "Currency code of document is not supported by this JVM: "+code); } return c; } @Override public boolean exists(int doc) { return amounts.exists(doc); } @Override public long longVal(int doc) { long amount = amounts.longVal(doc); // bail fast using whatever ammounts defaults to if no value // (if we don't do this early, currencyOrd may be < 0, // causing index bounds exception if ( ! 
exists(doc) ) { return amount; } if (!initializedCache) { for (int i = 0; i < fractionDigitCache.length; i++) { fractionDigitCache[i] = -1; } initializedCache = true; } int currencyOrd = currencies.ordVal(doc); if (currencyOrd == targetCurrencyOrd) { return amount; } double exchangeRate; int sourceFractionDigits; if (targetFractionDigits == -1) { targetFractionDigits = targetCurrency.getDefaultFractionDigits(); } if (currencyOrd < MAX_CURRENCIES_TO_CACHE) { exchangeRate = exchangeRateCache[currencyOrd]; if (exchangeRate <= 0.0) { String sourceCurrencyCode = getDocCurrencyCode(doc, currencyOrd); exchangeRate = exchangeRateCache[currencyOrd] = provider.getExchangeRate(sourceCurrencyCode, targetCurrency.getCurrencyCode()); } sourceFractionDigits = fractionDigitCache[currencyOrd]; if (sourceFractionDigits == -1) { sourceFractionDigits = fractionDigitCache[currencyOrd] = getDocCurrency(doc, currencyOrd).getDefaultFractionDigits(); } } else { Currency source = getDocCurrency(doc, currencyOrd); exchangeRate = provider.getExchangeRate(source.getCurrencyCode(), targetCurrency.getCurrencyCode()); sourceFractionDigits = source.getDefaultFractionDigits(); } return CurrencyValue.convertAmount(exchangeRate, sourceFractionDigits, amount, targetFractionDigits); } @Override public int intVal(int doc) { return (int) longVal(doc); } @Override public double doubleVal(int doc) { return (double) longVal(doc); } @Override public float floatVal(int doc) { return (float) longVal(doc); } @Override public String strVal(int doc) { return Long.toString(longVal(doc)); } @Override public String toString(int doc) { return name() + '(' + amounts.toString(doc) + ',' + currencies.toString(doc) + ')'; } }; } public String name() { return "rawcurrency"; } @Override public String description() { return name() + "(" + sf.getName() + ",target="+targetCurrency.getCurrencyCode()+")"; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RawCurrencyValueSource that = (RawCurrencyValueSource) o; return !(amountValues != null ? !amountValues.equals(that.amountValues) : that.amountValues != null) && !(currencyValues != null ? !currencyValues.equals(that.currencyValues) : that.currencyValues != null) && !(targetCurrency != null ? !targetCurrency.equals(that.targetCurrency) : that.targetCurrency != null); } @Override public int hashCode() { int result = targetCurrency != null ? targetCurrency.hashCode() : 0; result = 31 * result + (currencyValues != null ? currencyValues.hashCode() : 0); result = 31 * result + (amountValues != null ? amountValues.hashCode() : 0); return result; } } } /** * Configuration for currency. Provides currency exchange rates. */ class FileExchangeRateProvider implements ExchangeRateProvider { public static Logger log = LoggerFactory.getLogger(FileExchangeRateProvider.class); protected static final String PARAM_CURRENCY_CONFIG = "currencyConfig"; // Exchange rate map, maps Currency Code -> Currency Code -> Rate private Map<String, Map<String, Double>> rates = new HashMap<String, Map<String, Double>>(); private String currencyConfigFile; private ResourceLoader loader; /** * Returns the currently known exchange rate between two currencies. If a direct rate has been loaded, * it is used. Otherwise, if a rate is known to convert the target currency to the source, the inverse * exchange rate is computed. * * @param sourceCurrencyCode The source currency being converted from. 
* @param targetCurrencyCode The target currency being converted to. * @return The exchange rate. * @throws SolrException if the requested currency pair cannot be found */ @Override public double getExchangeRate(String sourceCurrencyCode, String targetCurrencyCode) { if (sourceCurrencyCode == null || targetCurrencyCode == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot get exchange rate; currency was null."); } if (sourceCurrencyCode.equals(targetCurrencyCode)) { return 1.0; } Double directRate = lookupRate(sourceCurrencyCode, targetCurrencyCode); if (directRate != null) { return directRate; } Double symmetricRate = lookupRate(targetCurrencyCode, sourceCurrencyCode); if (symmetricRate != null) { return 1.0 / symmetricRate; } throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No available conversion rate between " + sourceCurrencyCode + " to " + targetCurrencyCode); } /** * Looks up the current known rate, if any, between the source and target currencies. * * @param sourceCurrencyCode The source currency being converted from. * @param targetCurrencyCode The target currency being converted to. * @return The exchange rate, or null if no rate has been registered. */ private Double lookupRate(String sourceCurrencyCode, String targetCurrencyCode) { Map<String, Double> rhs = rates.get(sourceCurrencyCode); if (rhs != null) { return rhs.get(targetCurrencyCode); } return null; } /** * Registers the specified exchange rate. * * @param ratesMap The map to add rate to * @param sourceCurrencyCode The source currency. * @param targetCurrencyCode The target currency. * @param rate The known exchange rate. */ private void addRate(Map<String, Map<String, Double>> ratesMap, String sourceCurrencyCode, String targetCurrencyCode, double rate) { Map<String, Double> rhs = ratesMap.get(sourceCurrencyCode); if (rhs == null) { rhs = new HashMap<String, Double>(); ratesMap.put(sourceCurrencyCode, rhs); } rhs.put(targetCurrencyCode, rate); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; FileExchangeRateProvider that = (FileExchangeRateProvider) o; return !(rates != null ? !rates.equals(that.rates) : that.rates != null); } @Override public int hashCode() { return rates != null ? rates.hashCode() : 0; } @Override public String toString() { return "["+this.getClass().getName()+" : " + rates.size() + " rates.]"; } @Override public Set<String> listAvailableCurrencies() { Set<String> currencies = new HashSet<String>(); for(String from : rates.keySet()) { currencies.add(from); for(String to : rates.get(from).keySet()) { currencies.add(to); } } return currencies; } @Override public boolean reload() throws SolrException { InputStream is = null; Map<String, Map<String, Double>> tmpRates = new HashMap<String, Map<String, Double>>(); try { log.info("Reloading exchange rates from file "+this.currencyConfigFile); is = loader.openResource(currencyConfigFile); javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); try { dbf.setXIncludeAware(true); dbf.setNamespaceAware(true); } catch (UnsupportedOperationException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "XML parser doesn't support XInclude option", e); } try { Document doc = dbf.newDocumentBuilder().parse(is); XPathFactory xpathFactory = XPathFactory.newInstance(); XPath xpath = xpathFactory.newXPath(); // Parse exchange rates. 
NodeList nodes = (NodeList) xpath.evaluate("/currencyConfig/rates/rate", doc, XPathConstants.NODESET); for (int i = 0; i < nodes.getLength(); i++) { Node rateNode = nodes.item(i); NamedNodeMap attributes = rateNode.getAttributes(); Node from = attributes.getNamedItem("from"); Node to = attributes.getNamedItem("to"); Node rate = attributes.getNamedItem("rate"); if (from == null || to == null || rate == null) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exchange rate missing attributes (required: from, to, rate) " + rateNode); } String fromCurrency = from.getNodeValue(); String toCurrency = to.getNodeValue(); Double exchangeRate; if (null == CurrencyField.getCurrency(fromCurrency)) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'from' currency not supported in this JVM: " + fromCurrency); } if (null == CurrencyField.getCurrency(toCurrency)) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Specified 'to' currency not supported in this JVM: " + toCurrency); } try { exchangeRate = Double.parseDouble(rate.getNodeValue()); } catch (NumberFormatException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not parse exchange rate: " + rateNode, e); } addRate(tmpRates, fromCurrency, toCurrency, exchangeRate); } } catch (SAXException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (ParserConfigurationException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } catch (XPathExpressionException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e); } } catch (IOException e) { throw new SolrException(ErrorCode.SERVER_ERROR, "Error while opening Currency configuration file "+currencyConfigFile, e); } finally { try { if (is != null) { is.close(); } } catch (IOException e) { e.printStackTrace(); } } // Atomically swap in the new rates map, if it loaded successfully this.rates = tmpRates; return true; } @Override public void init(Map<String,String> params) throws SolrException { this.currencyConfigFile = params.get(PARAM_CURRENCY_CONFIG); if(currencyConfigFile == null) { throw new SolrException(ErrorCode.NOT_FOUND, "Missing required configuration "+PARAM_CURRENCY_CONFIG); } // Removing config params custom to us params.remove(PARAM_CURRENCY_CONFIG); } @Override public void inform(ResourceLoader loader) throws SolrException { if(loader == null) { throw new SolrException(ErrorCode.SERVER_ERROR, "Needs ResourceLoader in order to load config file"); } this.loader = loader; reload(); } } /** * Represents a Currency field value, which includes a long amount and ISO currency code. */ class CurrencyValue { private long amount; private String currencyCode; /** * Constructs a new currency value. * * @param amount The amount. * @param currencyCode The currency code. */ public CurrencyValue(long amount, String currencyCode) { this.amount = amount; this.currencyCode = currencyCode; } /** * Constructs a new currency value by parsing the specific input. * <p/> * Currency values are expected to be in the format &lt;amount&gt;,&lt;currency code&gt;, * for example, "500,USD" would represent 5 U.S. Dollars. * <p/> * If no currency code is specified, the default is assumed. * * @param externalVal The value to parse. 
* @param defaultCurrency The default currency. * @return The parsed CurrencyValue. */ public static CurrencyValue parse(String externalVal, String defaultCurrency) { if (externalVal == null) { return null; } String amount = externalVal; String code = defaultCurrency; if (externalVal.contains(",")) { String[] amountAndCode = externalVal.split(","); amount = amountAndCode[0]; code = amountAndCode[1]; } if (amount.equals("*")) { return null; } Currency currency = CurrencyField.getCurrency(code); if (currency == null) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Currency code not supported by this JVM: " + code); } try { double value = Double.parseDouble(amount); long currencyValue = Math.round(value * Math.pow(10.0, currency.getDefaultFractionDigits())); return new CurrencyValue(currencyValue, code); } catch (NumberFormatException e) { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e); } } /** * The amount of the CurrencyValue. * * @return The amount. */ public long getAmount() { return amount; } /** * The ISO currency code of the CurrencyValue. * * @return The currency code. */ public String getCurrencyCode() { return currencyCode; } /** * Performs a currency conversion & unit conversion. * * @param exchangeRates Exchange rates to apply. * @param sourceCurrencyCode The source currency code. * @param sourceAmount The source amount. * @param targetCurrencyCode The target currency code. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. */ public static long convertAmount(ExchangeRateProvider exchangeRates, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) { double exchangeRate = exchangeRates.getExchangeRate(sourceCurrencyCode, targetCurrencyCode); return convertAmount(exchangeRate, sourceCurrencyCode, sourceAmount, targetCurrencyCode); } /** * Performs a currency conversion & unit conversion. * * @param exchangeRate Exchange rate to apply. * @param sourceFractionDigits The fraction digits of the source. * @param sourceAmount The source amount. * @param targetFractionDigits The fraction digits of the target. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. */ public static long convertAmount(final double exchangeRate, final int sourceFractionDigits, final long sourceAmount, final int targetFractionDigits) { int digitDelta = targetFractionDigits - sourceFractionDigits; double value = ((double) sourceAmount * exchangeRate); if (digitDelta != 0) { if (digitDelta < 0) { for (int i = 0; i < -digitDelta; i++) { value *= 0.1; } } else { for (int i = 0; i < digitDelta; i++) { value *= 10.0; } } } return (long) value; } /** * Performs a currency conversion & unit conversion. * * @param exchangeRate Exchange rate to apply. * @param sourceCurrencyCode The source currency code. * @param sourceAmount The source amount. * @param targetCurrencyCode The target currency code. * @return The converted indexable units after the exchange rate and currency fraction digits are applied. 
*/ public static long convertAmount(double exchangeRate, String sourceCurrencyCode, long sourceAmount, String targetCurrencyCode) { if (targetCurrencyCode.equals(sourceCurrencyCode)) { return sourceAmount; } int sourceFractionDigits = Currency.getInstance(sourceCurrencyCode).getDefaultFractionDigits(); Currency targetCurrency = Currency.getInstance(targetCurrencyCode); int targetFractionDigits = targetCurrency.getDefaultFractionDigits(); return convertAmount(exchangeRate, sourceFractionDigits, sourceAmount, targetFractionDigits); } /** * Returns a new CurrencyValue that is the conversion of this CurrencyValue to the specified currency. * * @param exchangeRates The exchange rate provider. * @param targetCurrencyCode The target currency code to convert this CurrencyValue to. * @return The converted CurrencyValue. */ public CurrencyValue convertTo(ExchangeRateProvider exchangeRates, String targetCurrencyCode) { return new CurrencyValue(convertAmount(exchangeRates, this.getCurrencyCode(), this.getAmount(), targetCurrencyCode), targetCurrencyCode); } @Override public String toString() { return String.valueOf(amount) + "," + currencyCode; } }
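A minimal usage sketch for the CurrencyValue helper defined above. The demo class name and the sample values are hypothetical, and because CurrencyValue is package-private the sketch assumes it is compiled into the same org.apache.solr.schema package; only methods shown in the class itself are used.

package org.apache.solr.schema;

public class CurrencyValueDemo {
    public static void main(String[] args) {
        // "5.00,USD" parses to 500 minor units, since USD has two fraction digits.
        CurrencyValue five = CurrencyValue.parse("5.00,USD", "USD");
        System.out.println(five.getAmount() + " " + five.getCurrencyCode()); // prints: 500 USD

        // Apply an assumed USD->EUR rate of 0.8 to those 500 cents; both currencies use
        // two fraction digits, so the result is simply 500 * 0.8 = 400 euro cents.
        long euroCents = CurrencyValue.convertAmount(0.8, "USD", 500L, "EUR");
        System.out.println(euroCents); // prints: 400
    }
}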
SOLR-4138: doc typos git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1452508 13f79535-47bb-0310-9956-ffa450edef68
solr/core/src/java/org/apache/solr/schema/CurrencyField.java
SOLR-4138: doc typos
Java
apache-2.0
8bd826ae00b99a13c65de4ee5bdb8c16976d2d7d
0
michael-rapp/ChromeLikeTabSwitcher
/* * Copyright 2016 Michael Rapp * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.mrapp.android.tabswitcher; import android.content.Context; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Parcel; import android.os.Parcelable; import android.support.annotation.ColorInt; import android.support.annotation.DrawableRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.support.v4.content.ContextCompat; import android.text.TextUtils; import static de.mrapp.android.util.Condition.ensureNotEmpty; import static de.mrapp.android.util.Condition.ensureNotNull; /** * A tab, which can be added to a {@link TabSwitcher} widget. It has a title, as well as an optional * icon. Furthermore, it is possible to set a custom color and to specify, whether the tab should be * closeable, or not. * * @author Michael Rapp * @since 1.0.0 */ // TODO: When changing attributes while the switcher is shown, the corresponding tab view must be adapted public class Tab implements Parcelable { /** * A creator, which allows to create instances of the class {@link Tab} from parcels. */ public static final Creator<Tab> CREATOR = new Creator<Tab>() { @Override public Tab createFromParcel(final Parcel source) { return new Tab(source); } @Override public Tab[] newArray(final int size) { return new Tab[size]; } }; /** * The constant serial version UID. */ private static final long serialVersionUID = 1L; /** * The tab's title. */ private final CharSequence title; /** * The resource id of the tab's icon. */ private int iconId; /** * The tab's icon as a bitmap. */ private Bitmap iconBitmap; /** * True, if the tab is closeable, false otherwise. */ private boolean closeable; /** * The tab's color. */ private int color; /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param source * The parcel, the tab should be created from, as an instance of the class {@link * Parcel}. The parcel may not be null */ private Tab(@NonNull final Parcel source) { this.title = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(source); this.iconId = source.readInt(); this.iconBitmap = source.readParcelable(Bitmap.class.getClassLoader()); this.closeable = source.readInt() > 0; this.color = source.readInt(); } /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param title * The tab's title as an instance of the type {@link CharSequence}. The title may not be * neither be null, nor empty */ public Tab(@NonNull final CharSequence title) { ensureNotNull(title, "The title may not be null"); ensureNotEmpty(title, "The title may not be empty"); this.title = title; this.closeable = true; this.iconId = -1; this.iconBitmap = null; this.color = -1; } /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param context * The context, which should be used, as an instance of the class {@link Context}. 
The * context may not be null * @param resourceId * The resource id of the tab's title as an {@link Integer} value. The resource id must * correspond to a valid string resource */ public Tab(@NonNull final Context context, @StringRes final int resourceId) { this(context.getString(resourceId)); } /** * Returns the tab's title. * * @return The tabs title as an instance of the type {@link CharSequence}. The title may neither * be null, nor empty */ @NonNull public final CharSequence getTitle() { return title; } /** * Returns the tab's icon. * * @param context * The context, which should be used, as an instance of the class {@link Context}. The * context may not be null * @return The tab's icon as an instance of the class {@link Bitmap} or null, if no icon is set */ @Nullable public final Drawable getIcon(@NonNull final Context context) { if (iconId != -1) { return ContextCompat.getDrawable(context, iconId); } else { return iconBitmap != null ? new BitmapDrawable(context.getResources(), iconBitmap) : null; } } /** * Sets the tab's icon. * * @param resourceId * The resource id of the icon, which should be set, as an {@link Integer} value. The * resource id must correspond to a valid drawable resource */ public final void setIcon(@DrawableRes final int resourceId) { this.iconId = resourceId; this.iconBitmap = null; } /** * Sets the tab's icon. * * @param icon * The icon, which should be set, as an instance of the class {@link Bitmap} or null, if * no icon should be set */ public final void setIcon(@Nullable final Bitmap icon) { this.iconId = -1; this.iconBitmap = icon; } /** * Returns, whether the tab is closeable, or not. * * @return True, if the tab is closeable, false otherwise */ public final boolean isCloseable() { return closeable; } /** * Sets, whether the tab should be closeable, or not. * * @param closeable * True, if the tab should be closeable, false otherwise */ public final void setCloseable(final boolean closeable) { this.closeable = closeable; } /** * Returns the tab's color. * * @return The tab's color as an {@link Integer} value or -1, if no custom color is set */ @ColorInt public final int getColor() { return color; } /** * Sets the tab's color. * * @param color * The color, which should be set, as an {@link Integer} value or -1, if no custom color * should be set */ public final void setColor(@ColorInt final int color) { this.color = color; } @Override public final int describeContents() { return 0; } @Override public final void writeToParcel(final Parcel parcel, final int flags) { TextUtils.writeToParcel(title, parcel, flags); parcel.writeInt(iconId); parcel.writeParcelable(iconBitmap, flags); parcel.writeInt(closeable ? 1 : 0); parcel.writeInt(color); } }
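A short, hypothetical usage sketch for the Tab class above, as it might appear inside an Android Activity or Fragment of an app that uses the library. The drawable resource id and the color value are placeholders, and only methods declared in the class itself are called.

    private Tab createDemoTab() {
        Tab tab = new Tab("New tab");      // the title must be neither null nor empty
        tab.setIcon(R.drawable.ic_tab);    // R.drawable.ic_tab is a placeholder drawable resource
        tab.setCloseable(true);
        tab.setColor(android.graphics.Color.parseColor("#ff9800"));
        // Tab implements Parcelable, so it could also be put into a Bundle or Intent.
        return tab;                        // the tab would then be handed to a TabSwitcher widget
    }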
library/src/main/java/de/mrapp/android/tabswitcher/Tab.java
/* * Copyright 2016 Michael Rapp * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package de.mrapp.android.tabswitcher; import android.content.Context; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Parcel; import android.os.Parcelable; import android.support.annotation.ColorInt; import android.support.annotation.DrawableRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.annotation.StringRes; import android.support.v4.content.ContextCompat; import android.text.TextUtils; import static de.mrapp.android.util.Condition.ensureNotEmpty; import static de.mrapp.android.util.Condition.ensureNotNull; /** * A tab, which can be added to a {@link TabSwitcher} widget. It has a title, as well as an optional * icon. Furthermore, it is possible to set a custom color and to specify, whether the tab should be * closeable, or not. * * @author Michael Rapp * @since 1.0.0 */ public class Tab implements Parcelable { /** * A creator, which allows to create instances of the class {@link Tab} from parcels. */ public static final Creator<Tab> CREATOR = new Creator<Tab>() { @Override public Tab createFromParcel(final Parcel source) { return new Tab(source); } @Override public Tab[] newArray(final int size) { return new Tab[size]; } }; /** * The constant serial version UID. */ private static final long serialVersionUID = 1L; /** * The tab's title. */ private final CharSequence title; /** * The resource id of the tab's icon. */ private int iconId; /** * The tab's icon as a bitmap. */ private Bitmap iconBitmap; /** * True, if the tab is closeable, false otherwise. */ private boolean closeable; /** * The tab's color. */ private int color; /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param source * The parcel, the tab should be created from, as an instance of the class {@link * Parcel}. The parcel may not be null */ private Tab(@NonNull final Parcel source) { this.title = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(source); this.iconId = source.readInt(); this.iconBitmap = source.readParcelable(Bitmap.class.getClassLoader()); this.closeable = source.readInt() > 0; this.color = source.readInt(); } /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param title * The tab's title as an instance of the type {@link CharSequence}. The title may not be * neither be null, nor empty */ public Tab(@NonNull final CharSequence title) { ensureNotNull(title, "The title may not be null"); ensureNotEmpty(title, "The title may not be empty"); this.title = title; this.closeable = true; this.iconId = -1; this.iconBitmap = null; this.color = -1; } /** * Creates a new tab, which can be added to a {@link TabSwitcher} widget. * * @param context * The context, which should be used, as an instance of the class {@link Context}. 
The * context may not be null * @param resourceId * The resource id of the tab's title as an {@link Integer} value. The resource id must * correspond to a valid string resource */ public Tab(@NonNull final Context context, @StringRes final int resourceId) { this(context.getString(resourceId)); } /** * Returns the tab's title. * * @return The tabs title as an instance of the type {@link CharSequence}. The title may neither * be null, nor empty */ @NonNull public final CharSequence getTitle() { return title; } /** * Returns the tab's icon. * * @param context * The context, which should be used, as an instance of the class {@link Context}. The * context may not be null * @return The tab's icon as an instance of the class {@link Bitmap} or null, if no icon is set */ @Nullable public final Drawable getIcon(@NonNull final Context context) { if (iconId != -1) { return ContextCompat.getDrawable(context, iconId); } else { return iconBitmap != null ? new BitmapDrawable(context.getResources(), iconBitmap) : null; } } /** * Sets the tab's icon. * * @param resourceId * The resource id of the icon, which should be set, as an {@link Integer} value. The * resource id must correspond to a valid drawable resource */ public final void setIcon(@DrawableRes final int resourceId) { this.iconId = resourceId; this.iconBitmap = null; } /** * Sets the tab's icon. * * @param icon * The icon, which should be set, as an instance of the class {@link Bitmap} or null, if * no icon should be set */ public final void setIcon(@Nullable final Bitmap icon) { this.iconId = -1; this.iconBitmap = icon; } /** * Returns, whether the tab is closeable, or not. * * @return True, if the tab is closeable, false otherwise */ public final boolean isCloseable() { return closeable; } /** * Sets, whether the tab should be closeable, or not. * * @param closeable * True, if the tab should be closeable, false otherwise */ public final void setCloseable(final boolean closeable) { this.closeable = closeable; } /** * Returns the tab's color. * * @return The tab's color as an {@link Integer} value or -1, if no custom color is set */ @ColorInt public final int getColor() { return color; } /** * Sets the tab's color. * * @param color * The color, which should be set, as an {@link Integer} value or -1, if no custom color * should be set */ public final void setColor(@ColorInt final int color) { this.color = color; } @Override public final int describeContents() { return 0; } @Override public final void writeToParcel(final Parcel parcel, final int flags) { TextUtils.writeToParcel(title, parcel, flags); parcel.writeInt(iconId); parcel.writeParcelable(iconBitmap, flags); parcel.writeInt(closeable ? 1 : 0); parcel.writeInt(color); } }
Added TODO comment.
library/src/main/java/de/mrapp/android/tabswitcher/Tab.java
Added TODO comment.
Java
apache-2.0
19531615ce9661e9c2f11861937d2bfbd89b0ba3
0
torakiki/sambox,veraPDF/veraPDF-pdfbox,ZhenyaM/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,mdamt/pdfbox,mathieufortin01/pdfbox,gavanx/pdflearn,joansmith/pdfbox,ChunghwaTelecom/pdfbox,benmccann/pdfbox
torakiki/sambox,veraPDF/veraPDF-pdfbox,ZhenyaM/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,mdamt/pdfbox,mathieufortin01/pdfbox,gavanx/pdflearn,joansmith/pdfbox,ChunghwaTelecom/pdfbox,benmccann/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.tools; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.font.PDFontDescriptor; import org.apache.pdfbox.util.PDFTextStripper; import org.apache.pdfbox.text.TextPosition; /** * Wrap stripped text in simple HTML, trying to form HTML paragraphs. Paragraphs * broken by pages, columns, or figures are not mended. * * @author jjb - http://www.johnjbarton.com * */ public class PDFText2HTML extends PDFTextStripper { private static final int INITIAL_PDF_TO_HTML_BYTES = 8192; private boolean onFirstPage = true; private final FontState fontState = new FontState(); /** * Constructor. * @throws IOException If there is an error during initialization. */ public PDFText2HTML() throws IOException { super(); setLineSeparator(LINE_SEPARATOR); setParagraphStart("<p>"); setParagraphEnd("</p>"+ LINE_SEPARATOR); setPageStart("<div style=\"page-break-before:always; page-break-after:always\">"); setPageEnd("</div>"+ LINE_SEPARATOR); setArticleStart(LINE_SEPARATOR); setArticleEnd(LINE_SEPARATOR); } /** * Write the header to the output document. Now also writes the tag defining * the character encoding. * * @throws IOException * If there is a problem writing out the header to the document. */ protected void writeHeader() throws IOException { StringBuffer buf = new StringBuffer(INITIAL_PDF_TO_HTML_BYTES); buf.append("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"" + "\n" + "\"http://www.w3.org/TR/html4/loose.dtd\">\n"); buf.append("<html><head>"); buf.append("<title>" + escape(getTitle()) + "</title>\n"); buf.append("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=\"UTF-16\">\n"); buf.append("</head>\n"); buf.append("<body>\n"); super.writeString(buf.toString()); } /** * {@inheritDoc} */ @Override protected void writePage() throws IOException { if (onFirstPage) { writeHeader(); onFirstPage = false; } super.writePage(); } /** * {@inheritDoc} */ @Override public void endDocument(PDDocument document) throws IOException { super.writeString("</body></html>"); } /** * This method will attempt to guess the title of the document using * either the document properties or the first lines of text. * * @return returns the title. 
*/ protected String getTitle() { String titleGuess = document.getDocumentInformation().getTitle(); if(titleGuess != null && titleGuess.length() > 0) { return titleGuess; } else { Iterator<List<TextPosition>> textIter = getCharactersByArticle().iterator(); float lastFontSize = -1.0f; StringBuffer titleText = new StringBuffer(); while (textIter.hasNext()) { Iterator<TextPosition> textByArticle = textIter.next().iterator(); while (textByArticle.hasNext()) { TextPosition position = textByArticle.next(); float currentFontSize = position.getFontSize(); //If we're past 64 chars we will assume that we're past the title //64 is arbitrary if (currentFontSize != lastFontSize || titleText.length() > 64) { if (titleText.length() > 0) { return titleText.toString(); } lastFontSize = currentFontSize; } if (currentFontSize > 13.0f) { // most body text is 12pt titleText.append(position.getUnicode()); } } } } return ""; } /** * Write out the article separator (div tag) with proper text direction * information. * * @param isLTR true if direction of text is left to right * @throws IOException * If there is an error writing to the stream. */ @Override protected void startArticle(boolean isLTR) throws IOException { if (isLTR) { super.writeString("<div>"); } else { super.writeString("<div dir=\"RTL\">"); } } /** * Write out the article separator. * * @throws IOException * If there is an error writing to the stream. */ @Override protected void endArticle() throws IOException { super.endArticle(); super.writeString("</div>"); } /** * Write a string to the output stream, maintain font state, and escape some HTML characters. * The font state is only preserved per word. * * @param text The text to write to the stream. * @param textPositions the corresponding text positions * @throws IOException If there is an error writing to the stream. */ @Override protected void writeString(String text, List<TextPosition> textPositions) throws IOException { super.writeString(fontState.push(text, textPositions)); } /** * Write a string to the output stream and escape some HTML characters. * * @param chars String to be written to the stream * @throws IOException * If there is an error writing to the stream. */ @Override protected void writeString(String chars) throws IOException { super.writeString(escape(chars)); } /** * Writes the paragraph end "</p>" to the output. Furthermore, it will also clear the font state. * * {@inheritDoc} */ @Override protected void writeParagraphEnd() throws IOException { super.writeString(fontState.clear()); // do not escape HTML super.writeParagraphEnd(); } /** * Escape some HTML characters. * * @param chars String to be escaped * @return returns escaped String. */ private static String escape(String chars) { StringBuilder builder = new StringBuilder(chars.length()); for (int i = 0; i < chars.length(); i++) { appendEscaped(builder, chars.charAt(i)); } return builder.toString(); } private static void appendEscaped(StringBuilder builder, char character) { // write non-ASCII as named entities if ((character < 32) || (character > 126)) { int charAsInt = character; builder.append("&#").append(charAsInt).append(";"); } else { switch (character) { case 34: builder.append("&quot;"); break; case 38: builder.append("&amp;"); break; case 60: builder.append("&lt;"); break; case 62: builder.append("&gt;"); break; default: builder.append(String.valueOf(character)); } } } /** * A helper class to maintain the current font state. 
It's public methods will emit opening and * closing tags as needed, and in the correct order. * * @author Axel Dörfler */ private static class FontState { protected List<String> stateList = new ArrayList<String>(); protected Set<String> stateSet = new HashSet<String>(); /** * Pushes new {@link TextPosition TextPositions} into the font state. The state is only * preserved correctly for each letter if the number of letters in <code>text</code> matches * the number of {@link TextPosition} objects. Otherwise, it's done once for the complete * array (just by looking at its first entry). * * @return A string that contains the text including tag changes caused by its font state. */ public String push(String text, List<TextPosition> textPositions) { StringBuilder buffer = new StringBuilder(); if (text.length() == textPositions.size()) { // There is a 1:1 mapping, and we can use the TextPositions directly for (int i = 0; i < text.length(); i++) { push(buffer, text.charAt(i), textPositions.get(i)); } } else if (!text.isEmpty()) { // The normalized text does not match the number of TextPositions, so we'll just // have a look at its first entry. // TODO change PDFTextStripper.normalize() such that it maintains the 1:1 relation if (textPositions.isEmpty()) { return text; } push(buffer, text.charAt(0), textPositions.get(0)); buffer.append(escape(text.substring(1))); } return buffer.toString(); } /** * Closes all open states. * @return A string that contains the closing tags of all currently open states. */ public String clear() { StringBuilder buffer = new StringBuilder(); closeUntil(buffer, null); stateList.clear(); stateSet.clear(); return buffer.toString(); } protected String push(StringBuilder buffer, char character, TextPosition textPosition) { boolean bold = false; boolean italics = false; PDFontDescriptor descriptor = textPosition.getFont().getFontDescriptor(); if (descriptor != null) { bold = isBold(descriptor); italics = isItalic(descriptor); } buffer.append(bold ? open("b") : close("b")); buffer.append(italics ? open("i") : close("i")); appendEscaped(buffer, character); return buffer.toString(); } private String open(String tag) { if (stateSet.contains(tag)) { return ""; } stateList.add(tag); stateSet.add(tag); return openTag(tag); } private String close(String tag) { if (!stateSet.contains(tag)) { return ""; } // Close all tags until (but including) the one we should close StringBuilder tagsBuilder = new StringBuilder(); int index = closeUntil(tagsBuilder, tag); // Remove from state stateList.remove(index); stateSet.remove(tag); // Now open the states that were closed but should remain open again for (; index < stateList.size(); index++) { tagsBuilder.append(openTag(stateList.get(index))); } return tagsBuilder.toString(); } private int closeUntil(StringBuilder tagsBuilder, String endTag) { for (int i = stateList.size(); i-- > 0;) { String tag = stateList.get(i); tagsBuilder.append(closeTag(tag)); if (endTag != null && tag.equals(endTag)) { return i; } } return -1; } private String openTag(String tag) { return "<" + tag + ">"; } private String closeTag(String tag) { return "</" + tag + ">"; } private boolean isBold(PDFontDescriptor descriptor) { if (descriptor.isForceBold()) { return true; } return descriptor.getFontName().contains("Bold"); } private boolean isItalic(PDFontDescriptor descriptor) { if (descriptor.isItalic()) { return true; } return descriptor.getFontName().contains("Italic"); } } }
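A brief, hedged usage sketch for the PDFText2HTML stripper above. It assumes the PDFBox API of this snapshot, where PDDocument.load(File) and the inherited PDFTextStripper.getText(PDDocument) are available; the input and output file names are placeholders.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.tools.PDFText2HTML;

public class Pdf2HtmlDemo {
    public static void main(String[] args) throws Exception {
        PDDocument document = PDDocument.load(new File("input.pdf"));
        try {
            PDFText2HTML stripper = new PDFText2HTML();
            // getText() drives writeHeader(), writePage() and endDocument() shown above.
            String html = stripper.getText(document);
            // The generated header declares UTF-16, so write the bytes accordingly.
            Files.write(Paths.get("output.html"), html.getBytes(StandardCharsets.UTF_16));
        } finally {
            document.close();
        }
    }
}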
tools/src/main/java/org/apache/pdfbox/tools/PDFText2HTML.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.tools; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.pdmodel.font.PDFontDescriptor; import org.apache.pdfbox.util.PDFTextStripper; import org.apache.pdfbox.text.TextPosition; /** * Wrap stripped text in simple HTML, trying to form HTML paragraphs. Paragraphs * broken by pages, columns, or figures are not mended. * * @author jjb - http://www.johnjbarton.com * */ public class PDFText2HTML extends PDFTextStripper { private static final int INITIAL_PDF_TO_HTML_BYTES = 8192; private boolean onFirstPage = true; private final FontState fontState = new FontState(); /** * Constructor. * @throws IOException If there is an error during initialization. */ public PDFText2HTML() throws IOException { super(); setLineSeparator(LINE_SEPARATOR); setParagraphStart("<p>"); setParagraphEnd("</p>"+ LINE_SEPARATOR); setPageStart("<div style=\"page-break-before:always; page-break-after:always\">"); setPageEnd("</div>"+ LINE_SEPARATOR); setArticleStart(LINE_SEPARATOR); setArticleEnd(LINE_SEPARATOR); } /** * Write the header to the output document. Now also writes the tag defining * the character encoding. * * @throws IOException * If there is a problem writing out the header to the document. */ protected void writeHeader() throws IOException { StringBuffer buf = new StringBuffer(INITIAL_PDF_TO_HTML_BYTES); buf.append("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"" + "\n" + "\"http://www.w3.org/TR/html4/loose.dtd\">\n"); buf.append("<html><head>"); buf.append("<title>" + escape(getTitle()) + "</title>\n"); buf.append("<meta http-equiv=\"Content-Type\" content=\"text/html; charset=\"UTF-16\">\n"); buf.append("</head>\n"); buf.append("<body>\n"); super.writeString(buf.toString()); } /** * {@inheritDoc} */ @Override protected void writePage() throws IOException { if (onFirstPage) { writeHeader(); onFirstPage = false; } super.writePage(); } /** * {@inheritDoc} */ @Override public void endDocument(PDDocument document) throws IOException { super.writeString("</body></html>"); } /** * This method will attempt to guess the title of the document using * either the document properties or the first lines of text. * * @return returns the title. 
*/ protected String getTitle() { String titleGuess = document.getDocumentInformation().getTitle(); if(titleGuess != null && titleGuess.length() > 0) { return titleGuess; } else { Iterator<List<TextPosition>> textIter = getCharactersByArticle().iterator(); float lastFontSize = -1.0f; StringBuffer titleText = new StringBuffer(); while (textIter.hasNext()) { Iterator<TextPosition> textByArticle = textIter.next().iterator(); while (textByArticle.hasNext()) { TextPosition position = textByArticle.next(); float currentFontSize = position.getFontSize(); //If we're past 64 chars we will assume that we're past the title //64 is arbitrary if (currentFontSize != lastFontSize || titleText.length() > 64) { if (titleText.length() > 0) { return titleText.toString(); } lastFontSize = currentFontSize; } if (currentFontSize > 13.0f) { // most body text is 12pt titleText.append(position.getUnicode()); } } } } return ""; } /** * Write out the article separator (div tag) with proper text direction * information. * * @param isLTR true if direction of text is left to right * @throws IOException * If there is an error writing to the stream. */ @Override protected void startArticle(boolean isLTR) throws IOException { if (isLTR) { super.writeString("<div>"); } else { super.writeString("<div dir=\"RTL\">"); } } /** * Write out the article separator. * * @throws IOException * If there is an error writing to the stream. */ @Override protected void endArticle() throws IOException { super.endArticle(); super.writeString("</div>"); } /** * Write a string to the output stream, maintain font state, and escape some HTML characters. * The font state is only preserved per word. * * @param text The text to write to the stream. * @param textPositions the corresponding text positions * @throws IOException If there is an error writing to the stream. */ @Override protected void writeString(String text, List<TextPosition> textPositions) throws IOException { super.writeString(fontState.push(text, textPositions)); } /** * Write a string to the output stream and escape some HTML characters. * * @param chars String to be written to the stream * @throws IOException * If there is an error writing to the stream. */ @Override protected void writeString(String chars) throws IOException { super.writeString(escape(chars)); } /** * Writes the paragraph end "</p>" to the output. Furthermore, it will also clear the font state. * * {@inheritDoc} */ @Override protected void writeParagraphEnd() throws IOException { super.writeString(fontState.clear()); // do not escape HTML super.writeParagraphEnd(); } /** * Escape some HTML characters. * * @param chars String to be escaped * @return returns escaped String. */ private static String escape(String chars) { StringBuilder builder = new StringBuilder(chars.length()); for (int i = 0; i < chars.length(); i++) { appendEscaped(builder, chars.charAt(i)); } return builder.toString(); } private static void appendEscaped(StringBuilder builder, char character) { // write non-ASCII as named entities if ((character < 32) || (character > 126)) { int charAsInt = character; builder.append("&#").append(charAsInt).append(";"); } else { switch (character) { case 34: builder.append("&quot;"); break; case 38: builder.append("&amp;"); break; case 60: builder.append("&lt;"); break; case 62: builder.append("&gt;"); break; default: builder.append(String.valueOf(character)); } } } /** * A helper class to maintain the current font state. 
It's public methods will emit opening and * closing tags as needed, and in the correct order. * * @author Axel D�rfler */ private static class FontState { protected List<String> stateList = new ArrayList<String>(); protected Set<String> stateSet = new HashSet<String>(); /** * Pushes new {@link TextPosition TextPositions} into the font state. The state is only * preserved correctly for each letter if the number of letters in <code>text</code> matches * the number of {@link TextPosition} objects. Otherwise, it's done once for the complete * array (just by looking at its first entry). * * @return A string that contains the text including tag changes caused by its font state. */ public String push(String text, List<TextPosition> textPositions) { StringBuilder buffer = new StringBuilder(); if (text.length() == textPositions.size()) { // There is a 1:1 mapping, and we can use the TextPositions directly for (int i = 0; i < text.length(); i++) { push(buffer, text.charAt(i), textPositions.get(i)); } } else if (!text.isEmpty()) { // The normalized text does not match the number of TextPositions, so we'll just // have a look at its first entry. // TODO change PDFTextStripper.normalize() such that it maintains the 1:1 relation if (textPositions.isEmpty()) { return text; } push(buffer, text.charAt(0), textPositions.get(0)); buffer.append(escape(text.substring(1))); } return buffer.toString(); } /** * Closes all open states. * @return A string that contains the closing tags of all currently open states. */ public String clear() { StringBuilder buffer = new StringBuilder(); closeUntil(buffer, null); stateList.clear(); stateSet.clear(); return buffer.toString(); } protected String push(StringBuilder buffer, char character, TextPosition textPosition) { boolean bold = false; boolean italics = false; PDFontDescriptor descriptor = textPosition.getFont().getFontDescriptor(); if (descriptor != null) { bold = isBold(descriptor); italics = isItalic(descriptor); } buffer.append(bold ? open("b") : close("b")); buffer.append(italics ? open("i") : close("i")); appendEscaped(buffer, character); return buffer.toString(); } private String open(String tag) { if (stateSet.contains(tag)) { return ""; } stateList.add(tag); stateSet.add(tag); return openTag(tag); } private String close(String tag) { if (!stateSet.contains(tag)) { return ""; } // Close all tags until (but including) the one we should close StringBuilder tagsBuilder = new StringBuilder(); int index = closeUntil(tagsBuilder, tag); // Remove from state stateList.remove(index); stateSet.remove(tag); // Now open the states that were closed but should remain open again for (; index < stateList.size(); index++) { tagsBuilder.append(openTag(stateList.get(index))); } return tagsBuilder.toString(); } private int closeUntil(StringBuilder tagsBuilder, String endTag) { for (int i = stateList.size(); i-- > 0;) { String tag = stateList.get(i); tagsBuilder.append(closeTag(tag)); if (endTag != null && tag.equals(endTag)) { return i; } } return -1; } private String openTag(String tag) { return "<" + tag + ">"; } private String closeTag(String tag) { return "</" + tag + ">"; } private boolean isBold(PDFontDescriptor descriptor) { if (descriptor.isForceBold()) { return true; } return descriptor.getFontName().contains("Bold"); } private boolean isItalic(PDFontDescriptor descriptor) { if (descriptor.isItalic()) { return true; } return descriptor.getFontName().contains("Italic"); } } }
PDFBOX-2576: utf8 git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1651434 13f79535-47bb-0310-9956-ffa450edef68
tools/src/main/java/org/apache/pdfbox/tools/PDFText2HTML.java
PDFBOX-2576: utf8
Java
apache-2.0
7d6907430e144b15f2eb1813b8de5a8f12c7dd79
0
apache/incubator-twill,serranom/twill,vesense/incubator-twill,cdapio/twill,chtyim/incubator-twill,codeboyyong/incubator-twill,sanojkodikkara/incubator-twill
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.twill.internal; import org.apache.twill.filesystem.Location; import org.apache.twill.internal.utils.Dependencies; import com.google.common.base.Function; import com.google.common.base.Splitter; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.ByteStreams; import com.google.common.io.Files; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.net.URL; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.zip.CRC32; import java.util.zip.CheckedOutputStream; /** * This class builds jar files based on class dependencies. */ public final class ApplicationBundler { private static final Logger LOG = LoggerFactory.getLogger(ApplicationBundler.class); public static final String SUBDIR_CLASSES = "classes/"; public static final String SUBDIR_LIB = "lib/"; public static final String SUBDIR_RESOURCES = "resources/"; private final List<String> excludePackages; private final List<String> includePackages; private final Set<String> bootstrapClassPaths; private final CRC32 crc32; /** * Constructs a ApplicationBundler. * * @param excludePackages Class packages to exclude */ public ApplicationBundler(Iterable<String> excludePackages) { this(excludePackages, ImmutableList.<String>of()); } /** * Constructs a ApplicationBundler. * * @param excludePackages Class packages to exclude * @param includePackages Class packages that should be included. Anything in this list will override the * one provided in excludePackages. */ public ApplicationBundler(Iterable<String> excludePackages, Iterable<String> includePackages) { this.excludePackages = ImmutableList.copyOf(excludePackages); this.includePackages = ImmutableList.copyOf(includePackages); ImmutableSet.Builder<String> builder = ImmutableSet.builder(); for (String classpath : Splitter.on(File.pathSeparatorChar).split(System.getProperty("sun.boot.class.path"))) { File file = new File(classpath); builder.add(file.getAbsolutePath()); try { builder.add(file.getCanonicalPath()); } catch (IOException e) { // Ignore the exception and proceed. 
} } this.bootstrapClassPaths = builder.build(); this.crc32 = new CRC32(); } public void createBundle(Location target, Iterable<Class<?>> classes) throws IOException { createBundle(target, classes, ImmutableList.<URI>of()); } /** * Same as calling {@link #createBundle(Location, Iterable)}. */ public void createBundle(Location target, Class<?> clz, Class<?>...classes) throws IOException { createBundle(target, ImmutableSet.<Class<?>>builder().add(clz).add(classes).build()); } /** * Creates a jar file which includes all the given classes and all the classes that they depended on. * The jar will also include all classes and resources under the packages as given as include packages * in the constructor. * * @param target Where to save the target jar file. * @param resources Extra resources to put into the jar file. If resource is a jar file, it'll be put under * lib/ entry, otherwise under the resources/ entry. * @param classes Set of classes to start the dependency traversal. * @throws IOException */ public void createBundle(Location target, Iterable<Class<?>> classes, Iterable<URI> resources) throws IOException { LOG.debug("start creating bundle {}. building a temporary file locally at first", target.getName()); // Write the jar to local tmp file first File tmpJar = File.createTempFile(target.getName(), ".tmp"); try { Set<String> entries = Sets.newHashSet(); JarOutputStream jarOut = new JarOutputStream(new FileOutputStream(tmpJar)); try { // Find class dependencies findDependencies(classes, entries, jarOut); // Add extra resources for (URI resource : resources) { copyResource(resource, entries, jarOut); } } finally { jarOut.close(); } LOG.debug("copying temporary bundle to destination {} ({} bytes)", target.toURI(), tmpJar.length()); // Copy the tmp jar into destination. 
OutputStream os = null; try { os = new BufferedOutputStream(target.getOutputStream()); Files.copy(tmpJar, os); } catch (IOException e) { throw new IOException("failed to copy bundle from " + tmpJar.toURI() + " to " + target.toURI(), e); } finally { if (os != null) { os.close(); } } LOG.debug("finished creating bundle at {}", target.toURI()); } finally { tmpJar.delete(); LOG.debug("cleaned up local temporary for bundle {}", tmpJar.toURI()); } } private void findDependencies(Iterable<Class<?>> classes, final Set<String> entries, final JarOutputStream jarOut) throws IOException { Iterable<String> classNames = Iterables.transform(classes, new Function<Class<?>, String>() { @Override public String apply(Class<?> input) { return input.getName(); } }); ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = getClass().getClassLoader(); } Dependencies.findClassDependencies(classLoader, new Dependencies.ClassAcceptor() { @Override public boolean accept(String className, URL classUrl, URL classPathUrl) { if (bootstrapClassPaths.contains(classPathUrl.getFile())) { return false; } boolean shouldInclude = false; for (String include : includePackages) { if (className.startsWith(include)) { shouldInclude = true; break; } } if (!shouldInclude) { for (String exclude : excludePackages) { if (className.startsWith(exclude)) { return false; } } } putEntry(className, classUrl, classPathUrl, entries, jarOut); return true; } }, classNames); } private void putEntry(String className, URL classUrl, URL classPathUrl, Set<String> entries, JarOutputStream jarOut) { String classPath = classPathUrl.getFile(); if (classPath.endsWith(".jar")) { saveDirEntry(SUBDIR_LIB, entries, jarOut); saveEntry(SUBDIR_LIB + classPath.substring(classPath.lastIndexOf('/') + 1), classPathUrl, entries, jarOut, false); } else { // Class file, put it under the classes directory saveDirEntry(SUBDIR_CLASSES, entries, jarOut); if ("file".equals(classPathUrl.getProtocol())) { // Copy every files under the classPath try { copyDir(new File(classPathUrl.toURI()), SUBDIR_CLASSES, entries, jarOut); } catch (Exception e) { throw Throwables.propagate(e); } } else { String entry = SUBDIR_CLASSES + className.replace('.', '/') + ".class"; saveDirEntry(entry.substring(0, entry.lastIndexOf('/') + 1), entries, jarOut); saveEntry(entry, classUrl, entries, jarOut, true); } } } /** * Saves a directory entry to the jar output. */ private void saveDirEntry(String path, Set<String> entries, JarOutputStream jarOut) { if (entries.contains(path)) { return; } try { String entry = ""; for (String dir : Splitter.on('/').omitEmptyStrings().split(path)) { entry += dir + '/'; if (entries.add(entry)) { JarEntry jarEntry = new JarEntry(entry); jarEntry.setMethod(JarOutputStream.STORED); jarEntry.setSize(0L); jarEntry.setCrc(0L); jarOut.putNextEntry(jarEntry); jarOut.closeEntry(); } } } catch (IOException e) { throw Throwables.propagate(e); } } /** * Saves a class entry to the jar output. 
*/ private void saveEntry(String entry, URL url, Set<String> entries, JarOutputStream jarOut, boolean compress) { LOG.debug("adding bundle entry " + entry); if (!entries.add(entry)) { return; } try { JarEntry jarEntry = new JarEntry(entry); InputStream is = url.openStream(); try { if (compress) { jarOut.putNextEntry(jarEntry); ByteStreams.copy(is, jarOut); } else { crc32.reset(); TransferByteOutputStream os = new TransferByteOutputStream(); CheckedOutputStream checkedOut = new CheckedOutputStream(os, crc32); ByteStreams.copy(is, checkedOut); checkedOut.close(); long size = os.size(); jarEntry.setMethod(JarEntry.STORED); jarEntry.setSize(size); jarEntry.setCrc(checkedOut.getChecksum().getValue()); jarOut.putNextEntry(jarEntry); os.transfer(jarOut); } } finally { is.close(); } jarOut.closeEntry(); } catch (Exception e) { throw Throwables.propagate(e); } } /** * Copies all entries under the file path. */ private void copyDir(File baseDir, String entryPrefix, Set<String> entries, JarOutputStream jarOut) throws IOException { LOG.debug("adding whole dir {} to bundle at '{}'", baseDir, entryPrefix); URI baseUri = baseDir.toURI(); Queue<File> queue = Lists.newLinkedList(); Collections.addAll(queue, baseDir.listFiles()); while (!queue.isEmpty()) { File file = queue.remove(); String entry = entryPrefix + baseUri.relativize(file.toURI()).getPath(); if (entries.add(entry)) { jarOut.putNextEntry(new JarEntry(entry)); if (file.isFile()) { try { Files.copy(file, jarOut); } catch (IOException e) { throw new IOException("failure copying from " + file.getAbsoluteFile() + " to JAR file entry " + entry, e); } } jarOut.closeEntry(); } if (file.isDirectory()) { File[] files = file.listFiles(); if (files != null) { queue.addAll(Arrays.asList(files)); } } } } private void copyResource(URI resource, Set<String> entries, JarOutputStream jarOut) throws IOException { if ("file".equals(resource.getScheme())) { File file = new File(resource); if (file.isDirectory()) { saveDirEntry(SUBDIR_RESOURCES, entries, jarOut); copyDir(file, SUBDIR_RESOURCES, entries, jarOut); return; } } URL url = resource.toURL(); String path = url.getFile(); String prefix = path.endsWith(".jar") ? SUBDIR_LIB : SUBDIR_RESOURCES; path = prefix + path.substring(path.lastIndexOf('/') + 1); saveDirEntry(prefix, entries, jarOut); jarOut.putNextEntry(new JarEntry(path)); InputStream is = url.openStream(); try { ByteStreams.copy(is, jarOut); } finally { is.close(); } } private static final class TransferByteOutputStream extends ByteArrayOutputStream { public void transfer(OutputStream os) throws IOException { os.write(buf, 0, count); } } }
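A hedged usage sketch for the ApplicationBundler above. LocalLocationFactory is the local-filesystem Location implementation from the same Twill code base; the output directory, the excluded packages and the BundleDemo class itself are placeholders, and only the constructor and createBundle overload shown above are used.

import java.io.File;
import com.google.common.collect.ImmutableList;
import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;
import org.apache.twill.internal.ApplicationBundler;

public class BundleDemo {
    public static void main(String[] args) throws Exception {
        // Where the bundle jar should be written (placeholder path).
        Location target = new LocalLocationFactory(new File("/tmp")).create("demo-bundle.jar");
        // Classes from these packages (placeholders) are excluded from the dependency traversal.
        ApplicationBundler bundler = new ApplicationBundler(
                ImmutableList.of("org.apache.hadoop", "org.apache.zookeeper"));
        // Traverses the class dependencies of BundleDemo and writes classes/ and lib/ entries.
        bundler.createBundle(target, BundleDemo.class);
    }
}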
core/src/main/java/org/apache/twill/internal/ApplicationBundler.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.twill.internal; import org.apache.twill.filesystem.Location; import org.apache.twill.internal.utils.Dependencies; import com.google.common.base.Function; import com.google.common.base.Splitter; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.ByteStreams; import com.google.common.io.Files; import java.io.BufferedOutputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.net.URL; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Queue; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import java.util.zip.CRC32; import java.util.zip.CheckedOutputStream; /** * This class builds jar files based on class dependencies. */ public final class ApplicationBundler { private final List<String> excludePackages; private final List<String> includePackages; private final Set<String> bootstrapClassPaths; private final CRC32 crc32; /** * Constructs a ApplicationBundler. * * @param excludePackages Class packages to exclude */ public ApplicationBundler(Iterable<String> excludePackages) { this(excludePackages, ImmutableList.<String>of()); } /** * Constructs a ApplicationBundler. * * @param excludePackages Class packages to exclude * @param includePackages Class packages that should be included. Anything in this list will override the * one provided in excludePackages. */ public ApplicationBundler(Iterable<String> excludePackages, Iterable<String> includePackages) { this.excludePackages = ImmutableList.copyOf(excludePackages); this.includePackages = ImmutableList.copyOf(includePackages); ImmutableSet.Builder<String> builder = ImmutableSet.builder(); for (String classpath : Splitter.on(File.pathSeparatorChar).split(System.getProperty("sun.boot.class.path"))) { File file = new File(classpath); builder.add(file.getAbsolutePath()); try { builder.add(file.getCanonicalPath()); } catch (IOException e) { // Ignore the exception and proceed. } } this.bootstrapClassPaths = builder.build(); this.crc32 = new CRC32(); } public void createBundle(Location target, Iterable<Class<?>> classes) throws IOException { createBundle(target, classes, ImmutableList.<URI>of()); } /** * Same as calling {@link #createBundle(Location, Iterable)}. 
*/ public void createBundle(Location target, Class<?> clz, Class<?>...classes) throws IOException { createBundle(target, ImmutableSet.<Class<?>>builder().add(clz).add(classes).build()); } /** * Creates a jar file which includes all the given classes and all the classes that they depended on. * The jar will also include all classes and resources under the packages as given as include packages * in the constructor. * * @param target Where to save the target jar file. * @param resources Extra resources to put into the jar file. If resource is a jar file, it'll be put under * lib/ entry, otherwise under the resources/ entry. * @param classes Set of classes to start the dependency traversal. * @throws IOException */ public void createBundle(Location target, Iterable<Class<?>> classes, Iterable<URI> resources) throws IOException { // Write the jar to local tmp file first File tmpJar = File.createTempFile(target.getName(), ".tmp"); try { Set<String> entries = Sets.newHashSet(); JarOutputStream jarOut = new JarOutputStream(new FileOutputStream(tmpJar)); try { // Find class dependencies findDependencies(classes, entries, jarOut); // Add extra resources for (URI resource : resources) { copyResource(resource, entries, jarOut); } } finally { jarOut.close(); } // Copy the tmp jar into destination. OutputStream os = new BufferedOutputStream(target.getOutputStream()); try { Files.copy(tmpJar, os); } finally { os.close(); } } finally { tmpJar.delete(); } } private void findDependencies(Iterable<Class<?>> classes, final Set<String> entries, final JarOutputStream jarOut) throws IOException { Iterable<String> classNames = Iterables.transform(classes, new Function<Class<?>, String>() { @Override public String apply(Class<?> input) { return input.getName(); } }); ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); if (classLoader == null) { classLoader = getClass().getClassLoader(); } Dependencies.findClassDependencies(classLoader, new Dependencies.ClassAcceptor() { @Override public boolean accept(String className, URL classUrl, URL classPathUrl) { if (bootstrapClassPaths.contains(classPathUrl.getFile())) { return false; } boolean shouldInclude = false; for (String include : includePackages) { if (className.startsWith(include)) { shouldInclude = true; break; } } if (!shouldInclude) { for (String exclude : excludePackages) { if (className.startsWith(exclude)) { return false; } } } putEntry(className, classUrl, classPathUrl, entries, jarOut); return true; } }, classNames); } private void putEntry(String className, URL classUrl, URL classPathUrl, Set<String> entries, JarOutputStream jarOut) { String classPath = classPathUrl.getFile(); if (classPath.endsWith(".jar")) { saveDirEntry("lib/", entries, jarOut); saveEntry("lib/" + classPath.substring(classPath.lastIndexOf('/') + 1), classPathUrl, entries, jarOut, false); } else { // Class file, put it under the classes directory saveDirEntry("classes/", entries, jarOut); if ("file".equals(classPathUrl.getProtocol())) { // Copy every files under the classPath try { copyDir(new File(classPathUrl.toURI()), "classes/", entries, jarOut); } catch (Exception e) { throw Throwables.propagate(e); } } else { String entry = "classes/" + className.replace('.', '/') + ".class"; saveDirEntry(entry.substring(0, entry.lastIndexOf('/') + 1), entries, jarOut); saveEntry(entry, classUrl, entries, jarOut, true); } } } /** * Saves a directory entry to the jar output. 
*/ private void saveDirEntry(String path, Set<String> entries, JarOutputStream jarOut) { if (entries.contains(path)) { return; } try { String entry = ""; for (String dir : Splitter.on('/').omitEmptyStrings().split(path)) { entry += dir + '/'; if (entries.add(entry)) { JarEntry jarEntry = new JarEntry(entry); jarEntry.setMethod(JarOutputStream.STORED); jarEntry.setSize(0L); jarEntry.setCrc(0L); jarOut.putNextEntry(jarEntry); jarOut.closeEntry(); } } } catch (IOException e) { throw Throwables.propagate(e); } } /** * Saves a class entry to the jar output. */ private void saveEntry(String entry, URL url, Set<String> entries, JarOutputStream jarOut, boolean compress) { if (!entries.add(entry)) { return; } try { JarEntry jarEntry = new JarEntry(entry); InputStream is = url.openStream(); try { if (compress) { jarOut.putNextEntry(jarEntry); ByteStreams.copy(is, jarOut); } else { crc32.reset(); TransferByteOutputStream os = new TransferByteOutputStream(); CheckedOutputStream checkedOut = new CheckedOutputStream(os, crc32); ByteStreams.copy(is, checkedOut); checkedOut.close(); long size = os.size(); jarEntry.setMethod(JarEntry.STORED); jarEntry.setSize(size); jarEntry.setCrc(checkedOut.getChecksum().getValue()); jarOut.putNextEntry(jarEntry); os.transfer(jarOut); } } finally { is.close(); } jarOut.closeEntry(); } catch (Exception e) { throw Throwables.propagate(e); } } /** * Copies all entries under the file path. */ private void copyDir(File baseDir, String entryPrefix, Set<String> entries, JarOutputStream jarOut) throws IOException { URI baseUri = baseDir.toURI(); Queue<File> queue = Lists.newLinkedList(); Collections.addAll(queue, baseDir.listFiles()); while (!queue.isEmpty()) { File file = queue.remove(); String entry = entryPrefix + baseUri.relativize(file.toURI()).getPath(); if (entries.add(entry)) { jarOut.putNextEntry(new JarEntry(entry)); if (file.isFile()) { Files.copy(file, jarOut); } jarOut.closeEntry(); } if (file.isDirectory()) { File[] files = file.listFiles(); if (files != null) { queue.addAll(Arrays.asList(files)); } } } } private void copyResource(URI resource, Set<String> entries, JarOutputStream jarOut) throws IOException { if ("file".equals(resource.getScheme())) { File file = new File(resource); if (file.isDirectory()) { saveDirEntry("resources/", entries, jarOut); copyDir(file, "resources/", entries, jarOut); return; } } URL url = resource.toURL(); String path = url.getFile(); String prefix = path.endsWith(".jar") ? "lib/" : "resources/"; path = prefix + path.substring(path.lastIndexOf('/') + 1); saveDirEntry(prefix, entries, jarOut); jarOut.putNextEntry(new JarEntry(path)); InputStream is = url.openStream(); try { ByteStreams.copy(is, jarOut); } finally { is.close(); } } private static final class TransferByteOutputStream extends ByteArrayOutputStream { public void transfer(OutputStream os) throws IOException { os.write(buf, 0, count); } } }
[TWILL-15] Apply patch for "Detailed error message when copy to HDFS fails"
core/src/main/java/org/apache/twill/internal/ApplicationBundler.java
[TWILL-15] Apply patch for "Detailed error message when copy to HDFS fails"
Java
apache-2.0
2da39b1d9508d89b48ec9921164fa9b16d2f3b28
0
SpineEventEngine/core-java
/* * Copyright 2019, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.model; import com.google.common.base.MoreObjects; import com.google.errorprone.annotations.Immutable; import com.google.protobuf.Message; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Objects; /** * Provides information for dispatching a message to a handler method. */ @Immutable public final class DispatchKey { private final Class<? extends Message> messageClass; private final @Nullable ArgumentFilter filter; private final @Nullable Class<? extends Message> originClass; public DispatchKey(Class<? extends Message> messageClass, @Nullable ArgumentFilter filter, @Nullable Class<? extends Message> originClass) { this.messageClass = messageClass; this.filter = filter; this.originClass = originClass; } /** * Obtains a filter-less version of this. * * <p>If this key has a filter, a new instance is created, which copies this key data * without the filter. Otherwise, this instance is returned. */ DispatchKey withoutFilter() { if (filter == null) { return this; } return new DispatchKey(messageClass, null, originClass); } /** * Creates a new key copying its data and taking the passed filter. */ public DispatchKey withFilter(ArgumentFilter filter) { return new DispatchKey(messageClass, filter, originClass); } @Override public int hashCode() { return Objects.hash(messageClass, filter, originClass); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } final DispatchKey other = (DispatchKey) obj; return Objects.equals(this.messageClass, other.messageClass) && Objects.equals(this.filter, other.filter) && Objects.equals(this.originClass, other.originClass); } @SuppressWarnings("DuplicateStringLiteralInspection") // Both classes have `filter` field. @Override public String toString() { MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this); helper.add("messageClass", messageClass.getName()); if (filter != null && !filter.acceptsAll()) { helper.add("filter", filter); } if (originClass != null) { helper.add("originClass", originClass); } return helper.toString(); } }
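A small sketch of the equals/hashCode contract that the class above satisfies after the fix, where hashCode() covers the same fields that equals() compares. StringValue is just a stand-in protobuf message class and the handler string is a placeholder; only the public constructor of DispatchKey is used.

import java.util.HashMap;
import java.util.Map;
import com.google.protobuf.StringValue;
import io.spine.server.model.DispatchKey;

public class DispatchKeyDemo {
    public static void main(String[] args) {
        // Two keys built from the same message class, with no filter and no origin.
        DispatchKey first = new DispatchKey(StringValue.class, null, null);
        DispatchKey second = new DispatchKey(StringValue.class, null, null);

        // Equal keys must produce equal hash codes, so they behave correctly as map keys.
        System.out.println(first.equals(second));                  // true
        System.out.println(first.hashCode() == second.hashCode()); // true

        Map<DispatchKey, String> handlers = new HashMap<>();
        handlers.put(first, "onSomethingHappened");
        System.out.println(handlers.get(second));                  // onSomethingHappened
    }
}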
server/src/main/java/io/spine/server/model/DispatchKey.java
/* * Copyright 2019, TeamDev. All rights reserved. * * Redistribution and use in source and/or binary forms, with or without * modification, must retain the above copyright notice and the following * disclaimer. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package io.spine.server.model; import com.google.common.base.MoreObjects; import com.google.errorprone.annotations.Immutable; import com.google.protobuf.Message; import org.checkerframework.checker.nullness.qual.Nullable; import java.util.Objects; /** * Provides information for dispatching a message to a handler method. */ @Immutable public final class DispatchKey { private final Class<? extends Message> messageClass; private final @Nullable ArgumentFilter filter; private final @Nullable Class<? extends Message> originClass; public DispatchKey(Class<? extends Message> messageClass, @Nullable ArgumentFilter filter, @Nullable Class<? extends Message> originClass) { this.messageClass = messageClass; this.filter = filter; this.originClass = originClass; } /** * Obtains a filter-less version of this. * * <p>If this key has a filter, a new instance is created, which copies this key data * without the filter. Otherwise, this instance is returned. */ DispatchKey withoutFilter() { if (filter == null) { return this; } return new DispatchKey(messageClass, null, originClass); } /** * Creates a new key copying its data and taking the passed filter. */ public DispatchKey withFilter(ArgumentFilter filter) { return new DispatchKey(messageClass, filter, originClass); } @Override public int hashCode() { return Objects.hash(messageClass, originClass); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } final DispatchKey other = (DispatchKey) obj; return Objects.equals(this.messageClass, other.messageClass) && Objects.equals(this.filter, other.filter) && Objects.equals(this.originClass, other.originClass); } @SuppressWarnings("DuplicateStringLiteralInspection") // Both classes have `filter` field. @Override public String toString() { MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this); helper.add("messageClass", messageClass.getName()); if (filter != null && !filter.acceptsAll()) { helper.add("filter", filter); } if (originClass != null) { helper.add("originClass", originClass); } return helper.toString(); } }
Fix hash code
server/src/main/java/io/spine/server/model/DispatchKey.java
Fix hash code
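The "Fix hash code" record above adds the filter field to hashCode(), which equals() already compares. The previous version did not violate the Object contract (equal keys still produced equal hash codes), but keys differing only in their filter were guaranteed to collide in hash-based collections. A simplified, self-contained illustration follows; the Key record is a made-up stand-in, not the real DispatchKey or ArgumentFilter.

import java.util.Objects;

public final class KeyHashDemo {

    // Hypothetical simplification of a dispatch key: two identity fields plus a filter.
    record Key(String messageClass, String filter, String originClass) {
        int oldHash() { return Objects.hash(messageClass, originClass); }           // filter omitted
        int newHash() { return Objects.hash(messageClass, filter, originClass); }   // filter included
    }

    public static void main(String[] args) {
        Key a = new Key("OrderPlaced", "status == NEW", null);
        Key b = new Key("OrderPlaced", "status == PAID", null);

        System.out.println(a.equals(b));                // false: filters differ
        System.out.println(a.oldHash() == b.oldHash()); // true: guaranteed bucket collision
        System.out.println(a.newHash() == b.newHash()); // almost certainly false
    }
}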
Java
apache-2.0
866ac7dbc75c3fcebb837965341c27d663f74ae4
0
zaproxy/zaproxy,thc202/zaproxy,meitar/zaproxy,kingthorin/zaproxy,Ali-Razmjoo/zaproxy,psiinon/zaproxy,gmaran23/zaproxy
/* * * Paros and its related class files. * * Paros is an HTTP/HTTPS proxy for assessing web application security. * Copyright (C) 2003-2004 Chinotec Technologies Company * * This program is free software; you can redistribute it and/or * modify it under the terms of the Clarified Artistic License * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Clarified Artistic License for more details. * * You should have received a copy of the Clarified Artistic License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // ZAP: 2011/05/15 Support for exclusions // ZAP: 2011/08/30 Support for scanner levels // ZAP: 2012/02/18 Dont log errors for temporary hrefs // ZAP: 2012/03/15 Changed the method getPathRegex to use the class StringBuilder // instead of StringBuffer and replaced some string concatenations with calls // to the method append of the class StringBuilder. Removed unnecessary castings // in the methods scanSingleNode, notifyHostComplete and pluginCompleted. Changed // the methods processPlugin and pluginCompleted to use Long.valueOf instead of // creating a new Long. // ZAP: 2012/04/25 Added @Override annotation to the appropriate method. // ZAP: 2012/07/30 Issue 43: Added support for Scope // ZAP: 2012/08/07 Issue 342 Support the HttpSenderListener // ZAP: 2012/08/07 Renamed Level to AlertThreshold and added support for AttackStrength // ZAP: 2012/08/31 Enabled control of AttackStrength // ZAP: 2012/11/22 Issue 421: Cleanly shut down any active scan threads on shutdown // ZAP: 2013/01/19 Issue 460 Add support for a scan progress dialog // ZAP: 2013/03/08 Added some debug logging // ZAP: 2014/01/16 Add support to plugin skipping // ZAP: 2014/02/21 Issue 1043: Custom active scan dialog // ZAP: 2014/03/23 Issue 1084: NullPointerException while selecting a node in the "Sites" tab // ZAP: 2014/04/01 Changed to set a name to created threads. 
// ZAP: 2014/06/23 Issue 1241: Active scanner might not report finished state when using host scanners // ZAP: 2014/06/26 Added the possibility to evaluate the current plugin/process progress // ZAP: 2014/07/07 Issue 389: Enable technology scope for scanners // ZAP: 2014/08/14 Issue 1291: 407 Proxy Authentication Required while active scanning // ZAP: 2014/10/24 Issue 1378: Revamp active scan panel // ZAP: 2014/10/25 Issue 1062: Made it possible to hook into the active scanner from extensions // ZAP: 2014/11/19 Issue 1412: Manage scan policies // ZAP: 2015/02/18 Issue 1062: Tidied up extension hooks // ZAP: 2015/04/02 Issue 321: Support multiple databases and Issue 1582: Low memory option // ZAP: 2015/04/17 A problem occur when a single node should be scanned because count start from -1 // ZAP: 2015/05/04 Issue 1566: Improve active scan's reported progress // ZAP: 2015/07/26 Issue 1618: Target Technology Not Honored // ZAP: 2015/10/29 Issue 2005: Active scanning incorrectly performed on sibling nodes // ZAP: 2015/11/27 Issue 2086: Report request counts per plugin // ZAP: 2015/12/16 Prevent HostProcess (and plugins run) from becoming in undefined state // ZAP: 2016/01/27 Prevent HostProcess from reporting progress higher than 100% // ZAP: 2016/04/21 Allow scanners to notify of messages sent (and tweak the progress and request count of each plugin) // ZAP: 2016/06/29 Allow to specify and obtain the reason why a scanner was skipped // ZAP: 2016/07/12 Do not allow techSet to be null // ZAP: 2016/07/01 Issue 2647 Support a/pscan rule configuration // ZAP: 2016/09/20 - Reorder statements to prevent (potential) NullPointerException in scanSingleNode // - JavaDoc tweaks // ZAP: 2016/11/14 Restore and deprecate old constructor, to keep binary compatibility // ZAP: 2016/12/13 Issue 2951: Support active scan rule and scan max duration // ZAP: 2016/12/20 Include the name of the user when logging the scan info // ZAP: 2017/03/20 Improve node enumeration in pre-scan phase. // ZAP: 2017/03/20 Log the number of messages sent by the scanners, when finished. // ZAP: 2017/03/25 Ensure messages to be scanned have a response. // ZAP: 2017/06/07 Scan just one node with AbstractHostPlugin (they apply to the whole host not individual messages). // ZAP: 2017/06/08 Collect messages to be scanned. // ZAP: 2017/06/15 Initialise the plugin factory immediately after starting the scan. // ZAP: 2017/06/15 Do not start following plugin if the scanner is paused. // ZAP: 2017/06/20 Log number of alerts raised by each scanner. // ZAP: 2017/07/06 Expose plugin stats. // ZAP: 2017/07/12 Tweak the method used when initialising the PluginFactory. // ZAP: 2017/07/13 Automatically skip dependent scanners (Issue 3784) // ZAP: 2017/07/18 Allow to obtain the (total) alert count. // ZAP: 2017/09/27 Allow to skip scanners by ID and don't allow to skip scanners already finished/skipped. // ZAP: 2017/10/05 Replace usage of Class.newInstance (deprecated in Java 9). // ZAP: 2017/11/29 Skip plugins if there's nothing to scan. // ZAP: 2017/12/29 Provide means to validate the redirections. // ZAP: 2018/01/01 Update initialisation of PluginStats. // ZAP: 2018/11/14 Log alert count when completed. 
package org.parosproxy.paros.core.scanner; import java.io.IOException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.common.ThreadPool; import org.parosproxy.paros.db.DatabaseException; import org.parosproxy.paros.model.HistoryReference; import org.parosproxy.paros.network.ConnectionParam; import org.parosproxy.paros.network.HttpMalformedHeaderException; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.network.HttpSender; import org.zaproxy.zap.extension.ascan.ScanPolicy; import org.zaproxy.zap.extension.ruleconfig.RuleConfig; import org.zaproxy.zap.extension.ruleconfig.RuleConfigParam; import org.zaproxy.zap.model.SessionStructure; import org.zaproxy.zap.model.StructuralNode; import org.zaproxy.zap.model.TechSet; import org.zaproxy.zap.network.HttpRedirectionValidator; import org.zaproxy.zap.network.HttpRequestConfig; import org.zaproxy.zap.users.User; public class HostProcess implements Runnable { private static final Logger log = Logger.getLogger(HostProcess.class); private static final DecimalFormat decimalFormat = new java.text.DecimalFormat("###0.###"); private List<StructuralNode> startNodes = null; private boolean isStop = false; private PluginFactory pluginFactory; private ScannerParam scannerParam = null; private HttpSender httpSender = null; private ThreadPool threadPool = null; private Scanner parentScanner = null; private String hostAndPort = ""; private Analyser analyser = null; private Kb kb = null; private User user = null; private TechSet techSet; private RuleConfigParam ruleConfigParam; private String stopReason = null; /** * A {@code Map} from plugin IDs to corresponding {@link PluginStats}. * * @see #processPlugin(Plugin) */ private final Map<Integer, PluginStats> mapPluginStats = new HashMap<>(); private long hostProcessStartTime = 0; // ZAP: progress related private int nodeInScopeCount = 0; private int percentage = 0; /** * The count of requests sent by the {@code HostProcess} itself. */ private int requestCount; /** * The count of alerts raised during the scan. */ private int alertCount; /** * The ID of the message to be scanned by {@link AbstractHostPlugin}s. * <p> * As opposed to {@link AbstractAppPlugin}s, {@code AbstractHostPlugin}s just require one message to scan as they run * against the host (not individual messages/endpoints). * * @see #messagesIdsToAppScan */ private int messageIdToHostScan; /** * The IDs of the messages to be scanned by {@link AbstractAppPlugin}s. * * @see #messageIdToHostScan */ private List<Integer> messagesIdsToAppScan; /** * The HTTP request configuration, uses a {@link HttpRedirectionValidator} that ensures the followed redirections are in * scan's scope. * <p> * Lazily initialised. * * @see #getRedirectRequestConfig() * @see #redirectionValidator */ private HttpRequestConfig redirectRequestConfig; /** * The redirection validator that ensures the followed redirections are in scan's scope. * <p> * Lazily initialised. * * @see #getRedirectionValidator() * @see #redirectRequestConfig */ private HttpRedirectionValidator redirectionValidator; /** * Constructs a {@code HostProcess}, with no rules' configurations. 
* * @param hostAndPort the host:port value of the site that need to be processed * @param parentScanner the scanner instance which instantiated this process * @param scannerParam the session scanner parameters * @param connectionParam the connection parameters * @param scanPolicy the scan policy * @deprecated Use {@link #HostProcess(String, Scanner, ScannerParam, ConnectionParam, ScanPolicy, RuleConfigParam)} * instead. It will be removed in a future version. */ @Deprecated public HostProcess(String hostAndPort, Scanner parentScanner, ScannerParam scannerParam, ConnectionParam connectionParam, ScanPolicy scanPolicy) { this(hostAndPort, parentScanner, scannerParam, connectionParam, scanPolicy, null); } /** * Constructs a {@code HostProcess}. * * @param hostAndPort the host:port value of the site that need to be processed * @param parentScanner the scanner instance which instantiated this process * @param scannerParam the session scanner parameters * @param connectionParam the connection parameters * @param scanPolicy the scan policy * @param ruleConfigParam the rules' configurations, might be {@code null}. * @since 2.6.0 */ public HostProcess(String hostAndPort, Scanner parentScanner, ScannerParam scannerParam, ConnectionParam connectionParam, ScanPolicy scanPolicy, RuleConfigParam ruleConfigParam) { super(); this.hostAndPort = hostAndPort; this.parentScanner = parentScanner; this.scannerParam = scannerParam; this.pluginFactory = scanPolicy.getPluginFactory().clone(); this.ruleConfigParam = ruleConfigParam; this.messageIdToHostScan = -1; this.messagesIdsToAppScan = new ArrayList<>(); httpSender = new HttpSender(connectionParam, true, HttpSender.ACTIVE_SCANNER_INITIATOR); httpSender.setUser(this.user); httpSender.setRemoveUserDefinedAuthHeaders(true); int maxNumberOfThreads; if (scannerParam.getHandleAntiCSRFTokens()) { // Single thread if handling anti CSRF tokens, otherwise token requests might get out of step maxNumberOfThreads = 1; } else { maxNumberOfThreads = scannerParam.getThreadPerHost(); } threadPool = new ThreadPool(maxNumberOfThreads, "ZAP-ActiveScanner-"); this.techSet = TechSet.AllTech; } /** * Set the initial starting node. * Should be set after the HostProcess initialization * @param startNode the start node we should start from */ public void setStartNode(StructuralNode startNode) { this.startNodes = new ArrayList<StructuralNode>(); this.startNodes.add(startNode); } public void addStartNode(StructuralNode startNode) { if (this.startNodes == null) { this.startNodes = new ArrayList<StructuralNode>(); } this.startNodes.add(startNode); } /** * Stop the current scanning process */ public void stop() { isStop = true; getAnalyser().stop(); } /** * Main execution method */ @Override public void run() { log.debug("HostProcess.run"); try { hostProcessStartTime = System.currentTimeMillis(); // Initialise plugin factory to report the state of the plugins ASAP. 
pluginFactory.reset(); synchronized (mapPluginStats) { for (Plugin plugin : pluginFactory.getPending()) { mapPluginStats.put(plugin.getId(), new PluginStats(plugin.getName())); } } for (StructuralNode startNode : startNodes) { traverse(startNode, true, node -> { if (canScanNode(node)) { messagesIdsToAppScan.add(node.getHistoryReference().getHistoryId()); } }); getAnalyser().start(startNode); } nodeInScopeCount = messagesIdsToAppScan.size(); if (!messagesIdsToAppScan.isEmpty()) { messageIdToHostScan = messagesIdsToAppScan.get(0); } logScanInfo(); Plugin plugin; while (!isStop() && pluginFactory.existPluginToRun()) { checkPause(); if (isStop()) { break; } plugin = pluginFactory.nextPlugin(); if (plugin != null) { plugin.setDelayInMs(this.scannerParam.getDelayInMs()); plugin.setTechSet(this.techSet); processPlugin(plugin); } else { // waiting for dependency - no test ready yet Util.sleep(1000); } } threadPool.waitAllThreadComplete(300000); } catch (Exception e) { log.error("An error occurred while active scanning:", e); stop(); } finally { notifyHostProgress(null); notifyHostComplete(); getHttpSender().shutdown(); } } /** * Logs information about the scan. * <p> * It logs the {@link #nodeInScopeCount number of nodes} that will be scanned and the name of the {@link #user}, if any. */ private void logScanInfo() { StringBuilder strBuilder = new StringBuilder(150); if (nodeInScopeCount != 0) { strBuilder.append("Scanning "); strBuilder.append(nodeInScopeCount); strBuilder.append(" node(s) "); } else { strBuilder.append("No nodes to scan "); } if (parentScanner.getJustScanInScope()) { strBuilder.append("[just in scope] "); } strBuilder.append("from ").append(hostAndPort); if (user != null) { strBuilder.append(" as "); strBuilder.append(user.getName()); } if (nodeInScopeCount == 0) { strBuilder.append(", skipping all plugins."); } log.info(strBuilder.toString()); } private void processPlugin(final Plugin plugin) { mapPluginStats.get(plugin.getId()).start(); if (nodeInScopeCount == 0) { pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.nonodes")); pluginCompleted(plugin); return; } else if (!plugin.targets(techSet)) { pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.techs")); pluginCompleted(plugin); return; } log.info("start host " + hostAndPort + " | " + plugin.getCodeName() + " strength " + plugin.getAttackStrength() + " threshold " + plugin.getAlertThreshold()); if (plugin instanceof AbstractHostPlugin) { checkPause(); if (isStop() || isSkipped(plugin) || !scanMessage(plugin, messageIdToHostScan)) { // Mark the plugin as completed if it was not run so the scan process can continue as expected. // The plugin might not be run, for example, if there was an error reading the message form DB. 
pluginCompleted(plugin); } } else if (plugin instanceof AbstractAppPlugin) { try { for (int messageId : messagesIdsToAppScan) { checkPause(); if (isStop() || isSkipped(plugin)) { return; } scanMessage(plugin, messageId); } threadPool.waitAllThreadComplete(600000); } finally { pluginCompleted(plugin); } } } private void traverse(StructuralNode node, boolean incRelatedSiblings, TraverseAction action) { if (node == null || isStop()) { return; } Set<StructuralNode> parentNodes = new HashSet<>(); parentNodes.add(node); action.apply(node); if (parentScanner.scanChildren()) { if (incRelatedSiblings) { // Also match siblings with the same hierarchic name // If we dont do this http://localhost/start might match the GET variant // in the Sites tree and miss the hierarchic node. // Note that this is only done for the top level try { Iterator<StructuralNode> iter = node.getParent().getChildIterator(); String nodeName = SessionStructure.getCleanRelativeName(node, false); while (iter.hasNext()) { StructuralNode sibling = iter.next(); if (! node.isSameAs(sibling) && nodeName.equals( SessionStructure.getCleanRelativeName(sibling, false))) { log.debug("traverse: including related sibling " + sibling.getName()); parentNodes.add(sibling); } } } catch (DatabaseException e) { // Ignore - if we cant connect to the db there will be plenty of other errors logged ;) } } for (StructuralNode pNode : parentNodes) { Iterator<StructuralNode> iter = pNode.getChildIterator(); while (iter.hasNext() && !isStop()) { checkPause(); try { traverse(iter.next(), false, action); } catch (Exception e) { log.error(e.getMessage(), e); } } } } } protected boolean nodeInScope(String nodeName) { return parentScanner.isInScope(nodeName); } /** * Scans the message with the given ID with the given plugin. * <p> * It's used a new instance of the given plugin. * * @param plugin the scanner * @param messageId the ID of the message. * @return {@code true} if the {@code plugin} was run, {@code false} otherwise. */ private boolean scanMessage(Plugin plugin, int messageId) { Plugin test; HistoryReference historyReference; HttpMessage msg; try { historyReference = new HistoryReference(messageId, true); msg = historyReference.getHttpMessage(); } catch (HttpMalformedHeaderException | DatabaseException e) { log.warn("Failed to read message with ID [" + messageId + "], cause: " + e.getMessage()); return false; } try { // Ensure the temporary nodes, added automatically to Sites tree, have a response. // The scanners might base the logic/attacks on the state of the response (e.g. status code). 
if (msg.getResponseHeader().isEmpty()) { msg = msg.cloneRequest(); if (!obtainResponse(historyReference, msg)) { return false; } } if (log.isDebugEnabled()) { log.debug("scanSingleNode node plugin=" + plugin.getName() + " node=" + historyReference.getURI().toString()); } test = plugin.getClass().getDeclaredConstructor().newInstance(); test.setConfig(plugin.getConfig()); if (this.ruleConfigParam != null) { // Set the configuration rules for (RuleConfig rc : this.ruleConfigParam.getAllRuleConfigs()) { test.getConfig().setProperty(rc.getKey(), rc.getValue()); } } test.setDelayInMs(plugin.getDelayInMs()); test.setDefaultAlertThreshold(plugin.getAlertThreshold()); test.setDefaultAttackStrength(plugin.getAttackStrength()); test.setTechSet(getTechSet()); test.init(msg, this); notifyHostProgress(plugin.getName() + ": " + msg.getRequestHeader().getURI().toString()); } catch (Exception e) { log.error(e.getMessage() + " " + historyReference.getURI().toString(), e); return false; } Thread thread; do { if (this.isStop()) { return false; } thread = threadPool.getFreeThreadAndRun(test); if (thread == null) { Util.sleep(200); } } while (thread == null); mapPluginStats.get(plugin.getId()).incProgress(); return true; } private boolean obtainResponse(HistoryReference hRef, HttpMessage message) { try { getHttpSender().sendAndReceive(message); notifyNewMessage(message); requestCount++; return true; } catch (IOException e) { log.warn( "Failed to obtain the HTTP response for href [id=" + hRef.getHistoryId() + ", type=" + hRef.getHistoryType() + ", URL=" + hRef.getURI() + "]: " + e.getMessage()); return false; } } /** * Tells whether or not the scanner can scan the given node. * <p> * A node must not be null, must contain a valid HistoryReference and be in scope. * * @param node the node to be checked * @return {@code true} if the node can be scanned, {@code false} otherwise. */ private boolean canScanNode(StructuralNode node) { if (node == null) { if (log.isDebugEnabled()) { log.debug("Ignoring null node"); } return false; } HistoryReference hRef = node.getHistoryReference(); if (hRef == null) { if (log.isDebugEnabled()) { log.debug("Ignoring null history reference for node: " + node.getName()); } return false; } if (HistoryReference.TYPE_SCANNER == hRef.getHistoryType()) { if (log.isDebugEnabled()) { log.debug("Ignoring \"scanner\" type href [id=" + hRef.getHistoryId() + ", URL=" + hRef.getURI() + "]"); } return false; } if (!nodeInScope(node.getName())) { if (log.isDebugEnabled()) { log.debug("Ignoring node not in scope: " + node.getName()); } return false; } return true; } /** * ZAP: method to get back the number of tests that need to be performed * @return the number of tests that need to be executed for this Scanner */ public int getTestTotalCount() { return nodeInScopeCount; } /** * ZAP: method to get back the current progress status of a specific plugin * @param plugin the plugin we're asking the progress * @return the current managed test count */ public int getTestCurrentCount(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { return 0; } return pluginStats.getProgress(); } /** * @deprecated (2.5.0) No longer used/needed, Plugin's progress is automatically updated/maintained by * {@code HostProcess}. * @param plugin unused * @param value unused */ @Deprecated public void setTestCurrentCount(Plugin plugin, int value) { // No longer used. } /** * @return Returns the httpSender. 
*/ public HttpSender getHttpSender() { return httpSender; } /** * Check if the current host scan has been stopped * @return true if the process has been stopped */ public boolean isStop() { if (this.scannerParam.getMaxScanDurationInMins() > 0) { if (System.currentTimeMillis() - this.hostProcessStartTime > TimeUnit.MINUTES.toMillis(this.scannerParam.getMaxScanDurationInMins())) { this.stopReason = Constant.messages.getString("ascan.progress.label.skipped.reason.maxScan"); this.stop(); } } return (isStop || parentScanner.isStop()); } /** * Check if the current host scan has been paused * @return true if the process has been paused */ public boolean isPaused() { return parentScanner.isPaused(); } private void checkPause() { while (parentScanner.isPaused() && !isStop()) { Util.sleep(500); } } public int getPercentageComplete () { return this.percentage; } private void notifyHostProgress(String msg) { if (pluginFactory.totalPluginToRun() == 0) { percentage = 100; } else { int numberRunning = 0; double progressRunning = 0; for (Plugin plugin : pluginFactory.getRunning()) { int scannedNodes = getTestCurrentCount(plugin); double pluginPercentage = (scannedNodes * 100.0) / getTestTotalCount(); if (pluginPercentage >= 100) { // More nodes are being scanned that the ones enumerated at the beginning... // Update global count and... nodeInScopeCount = scannedNodes; // make sure not return 100 (or more). pluginPercentage = 99; } progressRunning += pluginPercentage; numberRunning++; } int avgRunning = (int) (progressRunning / numberRunning); percentage = ((100 * pluginFactory.totalPluginCompleted()) + avgRunning) / pluginFactory.totalPluginToRun(); } parentScanner.notifyHostProgress(hostAndPort, msg, percentage); } private void notifyHostComplete() { long diffTimeMillis = System.currentTimeMillis() - hostProcessStartTime; String diffTimeString = decimalFormat.format(diffTimeMillis / 1000.0) + "s"; log.info("completed host " + hostAndPort + " in " + diffTimeString + " with " + getAlertCount() + " alert(s) raised."); parentScanner.notifyHostComplete(hostAndPort); } /** * Notifies interested parties that a new message was sent (and received). * <p> * {@link Plugin Plugins} should call {@link #notifyNewMessage(Plugin)} or {@link #notifyNewMessage(Plugin, HttpMessage)}, * instead. * * @param msg the new HTTP message * @since 1.2.0 */ public void notifyNewMessage(HttpMessage msg) { parentScanner.notifyNewMessage(msg); } /** * Notifies that the given {@code plugin} sent (and received) the given HTTP message. * * @param plugin the plugin that sent the message * @param message the message sent * @throws IllegalArgumentException if the given {@code plugin} is {@code null}. * @since 2.5.0 * @see #notifyNewMessage(Plugin) */ public void notifyNewMessage(Plugin plugin, HttpMessage message) { parentScanner.notifyNewMessage(message); notifyNewMessage(plugin); } /** * Notifies that the given {@code plugin} sent (and received) a non-HTTP message. * <p> * The call to this method has no effect if there's no {@code Plugin} with the given ID (or, it was not yet started). * * @param plugin the plugin that sent a non-HTTP message * @throws IllegalArgumentException if the given parameter is {@code null}. 
* @since 2.5.0 * @see #notifyNewMessage(Plugin, HttpMessage) */ public void notifyNewMessage(Plugin plugin) { if (plugin == null) { throw new IllegalArgumentException("Parameter plugin must not be null."); } PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats != null) { pluginStats.incMessageCount(); } } public void alertFound(Alert alert) { parentScanner.notifyAlertFound(alert); PluginStats pluginStats = mapPluginStats.get(alert.getPluginId()); if (pluginStats != null) { pluginStats.incAlertCount(); } alertCount++; } /** * Gets the alert count. * * @return the alert count. * @since 2.7.0 */ public int getAlertCount() { return alertCount; } /** * Give back the current process's Analyzer * @return the HTTP analyzer */ public Analyser getAnalyser() { if (analyser == null) { analyser = new Analyser(getHttpSender(), this); } return analyser; } /** * Gets the HTTP request configuration that ensures the followed redirections are in scan's scope. * * @return the HTTP request configuration, never {@code null}. * @since TODO add version * @see #getRedirectionValidator() */ HttpRequestConfig getRedirectRequestConfig() { if (redirectRequestConfig == null) { redirectRequestConfig = HttpRequestConfig.builder().setRedirectionValidator(getRedirectionValidator()).build(); } return redirectRequestConfig; } /** * Gets the redirection validator that ensures the followed redirections are in scan's scope. * * @return the redirection validator, never {@code null}. * @since TODO add version * @see #getRedirectRequestConfig() */ HttpRedirectionValidator getRedirectionValidator() { if (redirectionValidator == null) { redirectionValidator = redirection -> { if (!nodeInScope(redirection.getEscapedURI())) { if (log.isDebugEnabled()) { log.debug("Skipping redirection out of scan's scope: " + redirection); } return false; } return true; }; } return redirectionValidator; } public boolean handleAntiCsrfTokens() { return this.scannerParam.getHandleAntiCSRFTokens(); } /** * Skips the given plugin. * <p> * <strong>Note:</strong> Whenever possible callers should use {@link #pluginSkipped(Plugin, String)} instead. * * @param plugin the plugin that will be skipped, must not be {@code null} * @since 2.4.0 */ public void pluginSkipped(Plugin plugin) { pluginSkipped(plugin, null); } /** * Skips the plugin with the given ID with the given {@code reason}. * <p> * Ideally the {@code reason} should be internationalised as it is shown in the GUI. * * @param pluginId the ID of the plugin that will be skipped. * @param reason the reason why the plugin was skipped, might be {@code null}. * @since 2.7.0 * @see #pluginSkipped(Plugin, String) */ public void pluginSkipped(int pluginId, String reason) { Plugin plugin = pluginFactory.getPlugin(pluginId); if (plugin == null) { return; } pluginSkipped(plugin, reason); } /** * Skips the given {@code plugin} with the given {@code reason}. * <p> * Ideally the {@code reason} should be internationalised as it is shown in the GUI. 
* * @param plugin the plugin that will be skipped, must not be {@code null} * @param reason the reason why the plugin was skipped, might be {@code null} * @since 2.6.0 */ public void pluginSkipped(Plugin plugin, String reason) { if (isStop()) { return; } PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null || pluginStats.isSkipped() || pluginFactory.getCompleted().contains(plugin)) { return; } pluginStats.skip(); pluginStats.setSkippedReason(reason); for (Plugin dependent : pluginFactory.getDependentPlugins(plugin)) { pluginStats = mapPluginStats.get(dependent.getId()); if (pluginStats != null && !pluginStats.isSkipped() && !pluginFactory.getCompleted().contains(dependent)) { pluginStats.skip(); pluginStats.setSkippedReason( Constant.messages.getString( "ascan.progress.label.skipped.reason.dependency")); } } } /** * Tells whether or not the given {@code plugin} was skipped (either programmatically or by the user). * * @param plugin the plugin that will be checked * @return {@code true} if plugin was skipped, {@code false} otherwise * @since 2.4.0 * @see #getSkippedReason(Plugin) */ public boolean isSkipped(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats != null && pluginStats.isSkipped()) { return true; } if (plugin.getTimeFinished() == null && stopReason != null) { this.pluginSkipped(plugin, stopReason); return true; } else if (this.scannerParam.getMaxRuleDurationInMins() > 0 && plugin.getTimeStarted() != null) { long endtime = System.currentTimeMillis(); if (plugin.getTimeFinished() != null) { endtime = plugin.getTimeFinished().getTime(); } if (endtime - plugin.getTimeStarted().getTime() > TimeUnit.MINUTES.toMillis(this.scannerParam.getMaxRuleDurationInMins())) { this.pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.maxRule")); return true; } } return false; } /** * Gets the reason why the given plugin was skipped. 
* * @param plugin the plugin that will be checked * @return the reason why the given plugin was skipped, might be {@code null} if not skipped or there's no reason * @since 2.6.0 * @see #isSkipped(Plugin) */ public String getSkippedReason(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { return stopReason; } return pluginStats.getSkippedReason(); } /** * Complete the current plugin and update statistics * @param plugin the plugin that need to be marked as completed */ void pluginCompleted(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { // Plugin was not processed return; } StringBuilder sb = new StringBuilder(); if (isStop()) { sb.append("stopped host/plugin "); // ZAP: added skipping notifications } else if (pluginStats.isSkipped()) { sb.append("skipped plugin "); String reason = pluginStats.getSkippedReason(); if (reason != null) { sb.append('[').append(reason).append("] "); } } else { sb.append("completed host/plugin "); } sb.append(hostAndPort).append(" | ").append(plugin.getCodeName()); long startTimeMillis = pluginStats.getStartTime(); long diffTimeMillis = System.currentTimeMillis() - startTimeMillis; String diffTimeString = decimalFormat.format(diffTimeMillis / 1000.0); sb.append(" in ").append(diffTimeString).append('s'); sb.append(" with ").append(pluginStats.getMessageCount()).append(" message(s) sent"); sb.append(" and ").append(pluginStats.getAlertCount()).append(" alert(s) raised."); // Probably too verbose evaluate 4 the future log.info(sb.toString()); pluginFactory.setRunningPluginCompleted(plugin); notifyHostProgress(null); // ZAP: update progress as finished pluginStats.setProgress(nodeInScopeCount); } /** * Gets the knowledge base of the current scan. * * @return the knowledge base of the current scan, never {@code null}. */ Kb getKb() { if (kb == null) { kb = new Kb(); } return kb; } protected ScannerParam getScannerParam() { return scannerParam; } public List<Plugin> getPending() { return this.pluginFactory.getPending(); } public List<Plugin> getRunning() { return this.pluginFactory.getRunning(); } public List<Plugin> getCompleted() { return this.pluginFactory.getCompleted(); } /** * Set the user to scan as. If null then the current session will be used. * @param user the user to scan as */ public void setUser(User user) { this.user = user; if (httpSender != null) { httpSender.setUser(user); } } /** * Gets the technologies to be used in the scan. * * @return the technologies, never {@code null} (since 2.6.0) * @since 2.4.0 */ public TechSet getTechSet() { return techSet; } /** * Sets the technologies to be used in the scan. * * @param techSet the technologies to be used during the scan * @since 2.4.0 * @throws IllegalArgumentException (since 2.6.0) if the given parameter is {@code null}. 
*/ public void setTechSet(TechSet techSet) { if (techSet == null) { throw new IllegalArgumentException("Parameter techSet must not be null."); } this.techSet = techSet; } /** * ZAP: abstract plugin will call this method in order to invoke any extensions that have hooked into the active scanner * @param msg the message being scanned * @param plugin the plugin being run */ protected synchronized void performScannerHookBeforeScan(HttpMessage msg, AbstractPlugin plugin) { Iterator<ScannerHook> iter = this.parentScanner.getScannerHooks().iterator(); while(iter.hasNext()){ ScannerHook hook = iter.next(); if(hook != null) { try { hook.beforeScan(msg, plugin, this.parentScanner); } catch (Exception e) { log.info("An exception occurred while trying to call beforeScan(msg, plugin) for one of the ScannerHooks: " + e.getMessage(), e); } } } } /** * ZAP: abstract plugin will call this method in order to invoke any extensions that have hooked into the active scanner * @param msg the message being scanned * @param plugin the plugin being run */ protected synchronized void performScannerHookAfterScan(HttpMessage msg, AbstractPlugin plugin) { Iterator<ScannerHook> iter = this.parentScanner.getScannerHooks().iterator(); while(iter.hasNext()){ ScannerHook hook = iter.next(); if(hook != null) { try { hook.afterScan(msg, plugin, this.parentScanner); } catch (Exception e) { log.info("An exception occurred while trying to call afterScan(msg, plugin) for one of the ScannerHooks: " + e.getMessage(), e); } } } } public String getHostAndPort() { return this.hostAndPort; } /** * @deprecated (2.5.0) No longer used/needed, Plugin's request count is automatically updated/maintained by * {@code HostProcess}. * @param pluginId the ID of the plugin * @param reqCount the number of requests sent */ @Deprecated public void setPluginRequestCount(int pluginId, int reqCount) { // No longer used. } /** * Gets the request count of the plugin with the given ID. * * @param pluginId the ID of the plugin * @return the request count * @since 2.4.3 * @see #getRequestCount() */ public int getPluginRequestCount(int pluginId) { PluginStats pluginStats = mapPluginStats.get(pluginId); if (pluginStats != null) { return pluginStats.getMessageCount(); } return 0; } /** * Gets the count of requests sent (and received) by all {@code Plugin}s and the {@code Analyser}. * * @return the count of request sent * @since 2.5.0 * @see #getPluginRequestCount(int) * @see #getAnalyser() */ public int getRequestCount() { synchronized (mapPluginStats) { int count = requestCount + getAnalyser().getRequestCount(); for (PluginStats stats : mapPluginStats.values()) { count += stats.getMessageCount(); } return count; } } /** * Gets the stats of the {@code Plugin} with the given ID. * * @param pluginId the ID of the plugin. * @return the stats of the plugin, or {@code null} if not found. * @since 2.7.0 */ public PluginStats getPluginStats(int pluginId) { synchronized (mapPluginStats) { return mapPluginStats.get(pluginId); } } /** * An action to be executed for each node traversed during the scan. * * @see #apply(StructuralNode) */ @FunctionalInterface private interface TraverseAction { /** * Applies an action to the node traversed. * * @param node the node being traversed */ void apply(StructuralNode node); } }
src/org/parosproxy/paros/core/scanner/HostProcess.java
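HostProcess.notifyHostProgress above folds per-plugin progress into a single percentage: each completed plugin contributes a full 100, the currently running plugins contribute the average of their individual percentages (capped at 99 so a running plugin never reports as finished), and the sum is divided by the number of plugins to run. A small standalone sketch of that arithmetic; the plugin counts and percentages are made up.

public class ProgressDemo {

    // Mirrors the aggregation in HostProcess.notifyHostProgress:
    // ((100 * completed) + average of the running plugins' percentages) / totalToRun.
    static int overallPercentage(int completedPlugins, int totalPlugins, double[] runningPercentages) {
        if (totalPlugins == 0) {
            return 100; // nothing to run counts as done
        }
        double sum = 0;
        for (double p : runningPercentages) {
            sum += Math.min(p, 99); // a running plugin never reports 100 (or more)
        }
        int avgRunning = runningPercentages.length == 0 ? 0 : (int) (sum / runningPercentages.length);
        return ((100 * completedPlugins) + avgRunning) / totalPlugins;
    }

    public static void main(String[] args) {
        // 3 of 5 plugins finished, two still running at 40% and 80%: (300 + 60) / 5 = 72.
        System.out.println(overallPercentage(3, 5, new double[] {40, 80}));
    }
}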
/* * * Paros and its related class files. * * Paros is an HTTP/HTTPS proxy for assessing web application security. * Copyright (C) 2003-2004 Chinotec Technologies Company * * This program is free software; you can redistribute it and/or * modify it under the terms of the Clarified Artistic License * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Clarified Artistic License for more details. * * You should have received a copy of the Clarified Artistic License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // ZAP: 2011/05/15 Support for exclusions // ZAP: 2011/08/30 Support for scanner levels // ZAP: 2012/02/18 Dont log errors for temporary hrefs // ZAP: 2012/03/15 Changed the method getPathRegex to use the class StringBuilder // instead of StringBuffer and replaced some string concatenations with calls // to the method append of the class StringBuilder. Removed unnecessary castings // in the methods scanSingleNode, notifyHostComplete and pluginCompleted. Changed // the methods processPlugin and pluginCompleted to use Long.valueOf instead of // creating a new Long. // ZAP: 2012/04/25 Added @Override annotation to the appropriate method. // ZAP: 2012/07/30 Issue 43: Added support for Scope // ZAP: 2012/08/07 Issue 342 Support the HttpSenderListener // ZAP: 2012/08/07 Renamed Level to AlertThreshold and added support for AttackStrength // ZAP: 2012/08/31 Enabled control of AttackStrength // ZAP: 2012/11/22 Issue 421: Cleanly shut down any active scan threads on shutdown // ZAP: 2013/01/19 Issue 460 Add support for a scan progress dialog // ZAP: 2013/03/08 Added some debug logging // ZAP: 2014/01/16 Add support to plugin skipping // ZAP: 2014/02/21 Issue 1043: Custom active scan dialog // ZAP: 2014/03/23 Issue 1084: NullPointerException while selecting a node in the "Sites" tab // ZAP: 2014/04/01 Changed to set a name to created threads. 
// ZAP: 2014/06/23 Issue 1241: Active scanner might not report finished state when using host scanners // ZAP: 2014/06/26 Added the possibility to evaluate the current plugin/process progress // ZAP: 2014/07/07 Issue 389: Enable technology scope for scanners // ZAP: 2014/08/14 Issue 1291: 407 Proxy Authentication Required while active scanning // ZAP: 2014/10/24 Issue 1378: Revamp active scan panel // ZAP: 2014/10/25 Issue 1062: Made it possible to hook into the active scanner from extensions // ZAP: 2014/11/19 Issue 1412: Manage scan policies // ZAP: 2015/02/18 Issue 1062: Tidied up extension hooks // ZAP: 2015/04/02 Issue 321: Support multiple databases and Issue 1582: Low memory option // ZAP: 2015/04/17 A problem occur when a single node should be scanned because count start from -1 // ZAP: 2015/05/04 Issue 1566: Improve active scan's reported progress // ZAP: 2015/07/26 Issue 1618: Target Technology Not Honored // ZAP: 2015/10/29 Issue 2005: Active scanning incorrectly performed on sibling nodes // ZAP: 2015/11/27 Issue 2086: Report request counts per plugin // ZAP: 2015/12/16 Prevent HostProcess (and plugins run) from becoming in undefined state // ZAP: 2016/01/27 Prevent HostProcess from reporting progress higher than 100% // ZAP: 2016/04/21 Allow scanners to notify of messages sent (and tweak the progress and request count of each plugin) // ZAP: 2016/06/29 Allow to specify and obtain the reason why a scanner was skipped // ZAP: 2016/07/12 Do not allow techSet to be null // ZAP: 2016/07/01 Issue 2647 Support a/pscan rule configuration // ZAP: 2016/09/20 - Reorder statements to prevent (potential) NullPointerException in scanSingleNode // - JavaDoc tweaks // ZAP: 2016/11/14 Restore and deprecate old constructor, to keep binary compatibility // ZAP: 2016/12/13 Issue 2951: Support active scan rule and scan max duration // ZAP: 2016/12/20 Include the name of the user when logging the scan info // ZAP: 2017/03/20 Improve node enumeration in pre-scan phase. // ZAP: 2017/03/20 Log the number of messages sent by the scanners, when finished. // ZAP: 2017/03/25 Ensure messages to be scanned have a response. // ZAP: 2017/06/07 Scan just one node with AbstractHostPlugin (they apply to the whole host not individual messages). // ZAP: 2017/06/08 Collect messages to be scanned. // ZAP: 2017/06/15 Initialise the plugin factory immediately after starting the scan. // ZAP: 2017/06/15 Do not start following plugin if the scanner is paused. // ZAP: 2017/06/20 Log number of alerts raised by each scanner. // ZAP: 2017/07/06 Expose plugin stats. // ZAP: 2017/07/12 Tweak the method used when initialising the PluginFactory. // ZAP: 2017/07/13 Automatically skip dependent scanners (Issue 3784) // ZAP: 2017/07/18 Allow to obtain the (total) alert count. // ZAP: 2017/09/27 Allow to skip scanners by ID and don't allow to skip scanners already finished/skipped. // ZAP: 2017/10/05 Replace usage of Class.newInstance (deprecated in Java 9). // ZAP: 2017/11/29 Skip plugins if there's nothing to scan. // ZAP: 2017/12/29 Provide means to validate the redirections. // ZAP: 2018/01/01 Update initialisation of PluginStats. 
package org.parosproxy.paros.core.scanner; import java.io.IOException; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.common.ThreadPool; import org.parosproxy.paros.db.DatabaseException; import org.parosproxy.paros.model.HistoryReference; import org.parosproxy.paros.network.ConnectionParam; import org.parosproxy.paros.network.HttpMalformedHeaderException; import org.parosproxy.paros.network.HttpMessage; import org.parosproxy.paros.network.HttpSender; import org.zaproxy.zap.extension.ascan.ScanPolicy; import org.zaproxy.zap.extension.ruleconfig.RuleConfig; import org.zaproxy.zap.extension.ruleconfig.RuleConfigParam; import org.zaproxy.zap.model.SessionStructure; import org.zaproxy.zap.model.StructuralNode; import org.zaproxy.zap.model.TechSet; import org.zaproxy.zap.network.HttpRedirectionValidator; import org.zaproxy.zap.network.HttpRequestConfig; import org.zaproxy.zap.users.User; public class HostProcess implements Runnable { private static final Logger log = Logger.getLogger(HostProcess.class); private static final DecimalFormat decimalFormat = new java.text.DecimalFormat("###0.###"); private List<StructuralNode> startNodes = null; private boolean isStop = false; private PluginFactory pluginFactory; private ScannerParam scannerParam = null; private HttpSender httpSender = null; private ThreadPool threadPool = null; private Scanner parentScanner = null; private String hostAndPort = ""; private Analyser analyser = null; private Kb kb = null; private User user = null; private TechSet techSet; private RuleConfigParam ruleConfigParam; private String stopReason = null; /** * A {@code Map} from plugin IDs to corresponding {@link PluginStats}. * * @see #processPlugin(Plugin) */ private final Map<Integer, PluginStats> mapPluginStats = new HashMap<>(); private long hostProcessStartTime = 0; // ZAP: progress related private int nodeInScopeCount = 0; private int percentage = 0; /** * The count of requests sent by the {@code HostProcess} itself. */ private int requestCount; /** * The count of alerts raised during the scan. */ private int alertCount; /** * The ID of the message to be scanned by {@link AbstractHostPlugin}s. * <p> * As opposed to {@link AbstractAppPlugin}s, {@code AbstractHostPlugin}s just require one message to scan as they run * against the host (not individual messages/endpoints). * * @see #messagesIdsToAppScan */ private int messageIdToHostScan; /** * The IDs of the messages to be scanned by {@link AbstractAppPlugin}s. * * @see #messageIdToHostScan */ private List<Integer> messagesIdsToAppScan; /** * The HTTP request configuration, uses a {@link HttpRedirectionValidator} that ensures the followed redirections are in * scan's scope. * <p> * Lazily initialised. * * @see #getRedirectRequestConfig() * @see #redirectionValidator */ private HttpRequestConfig redirectRequestConfig; /** * The redirection validator that ensures the followed redirections are in scan's scope. * <p> * Lazily initialised. * * @see #getRedirectionValidator() * @see #redirectRequestConfig */ private HttpRedirectionValidator redirectionValidator; /** * Constructs a {@code HostProcess}, with no rules' configurations. 
* * @param hostAndPort the host:port value of the site that need to be processed * @param parentScanner the scanner instance which instantiated this process * @param scannerParam the session scanner parameters * @param connectionParam the connection parameters * @param scanPolicy the scan policy * @deprecated Use {@link #HostProcess(String, Scanner, ScannerParam, ConnectionParam, ScanPolicy, RuleConfigParam)} * instead. It will be removed in a future version. */ @Deprecated public HostProcess(String hostAndPort, Scanner parentScanner, ScannerParam scannerParam, ConnectionParam connectionParam, ScanPolicy scanPolicy) { this(hostAndPort, parentScanner, scannerParam, connectionParam, scanPolicy, null); } /** * Constructs a {@code HostProcess}. * * @param hostAndPort the host:port value of the site that need to be processed * @param parentScanner the scanner instance which instantiated this process * @param scannerParam the session scanner parameters * @param connectionParam the connection parameters * @param scanPolicy the scan policy * @param ruleConfigParam the rules' configurations, might be {@code null}. * @since 2.6.0 */ public HostProcess(String hostAndPort, Scanner parentScanner, ScannerParam scannerParam, ConnectionParam connectionParam, ScanPolicy scanPolicy, RuleConfigParam ruleConfigParam) { super(); this.hostAndPort = hostAndPort; this.parentScanner = parentScanner; this.scannerParam = scannerParam; this.pluginFactory = scanPolicy.getPluginFactory().clone(); this.ruleConfigParam = ruleConfigParam; this.messageIdToHostScan = -1; this.messagesIdsToAppScan = new ArrayList<>(); httpSender = new HttpSender(connectionParam, true, HttpSender.ACTIVE_SCANNER_INITIATOR); httpSender.setUser(this.user); httpSender.setRemoveUserDefinedAuthHeaders(true); int maxNumberOfThreads; if (scannerParam.getHandleAntiCSRFTokens()) { // Single thread if handling anti CSRF tokens, otherwise token requests might get out of step maxNumberOfThreads = 1; } else { maxNumberOfThreads = scannerParam.getThreadPerHost(); } threadPool = new ThreadPool(maxNumberOfThreads, "ZAP-ActiveScanner-"); this.techSet = TechSet.AllTech; } /** * Set the initial starting node. * Should be set after the HostProcess initialization * @param startNode the start node we should start from */ public void setStartNode(StructuralNode startNode) { this.startNodes = new ArrayList<StructuralNode>(); this.startNodes.add(startNode); } public void addStartNode(StructuralNode startNode) { if (this.startNodes == null) { this.startNodes = new ArrayList<StructuralNode>(); } this.startNodes.add(startNode); } /** * Stop the current scanning process */ public void stop() { isStop = true; getAnalyser().stop(); } /** * Main execution method */ @Override public void run() { log.debug("HostProcess.run"); try { hostProcessStartTime = System.currentTimeMillis(); // Initialise plugin factory to report the state of the plugins ASAP. 
pluginFactory.reset(); synchronized (mapPluginStats) { for (Plugin plugin : pluginFactory.getPending()) { mapPluginStats.put(plugin.getId(), new PluginStats(plugin.getName())); } } for (StructuralNode startNode : startNodes) { traverse(startNode, true, node -> { if (canScanNode(node)) { messagesIdsToAppScan.add(node.getHistoryReference().getHistoryId()); } }); getAnalyser().start(startNode); } nodeInScopeCount = messagesIdsToAppScan.size(); if (!messagesIdsToAppScan.isEmpty()) { messageIdToHostScan = messagesIdsToAppScan.get(0); } logScanInfo(); Plugin plugin; while (!isStop() && pluginFactory.existPluginToRun()) { checkPause(); if (isStop()) { break; } plugin = pluginFactory.nextPlugin(); if (plugin != null) { plugin.setDelayInMs(this.scannerParam.getDelayInMs()); plugin.setTechSet(this.techSet); processPlugin(plugin); } else { // waiting for dependency - no test ready yet Util.sleep(1000); } } threadPool.waitAllThreadComplete(300000); } catch (Exception e) { log.error("An error occurred while active scanning:", e); stop(); } finally { notifyHostProgress(null); notifyHostComplete(); getHttpSender().shutdown(); } } /** * Logs information about the scan. * <p> * It logs the {@link #nodeInScopeCount number of nodes} that will be scanned and the name of the {@link #user}, if any. */ private void logScanInfo() { StringBuilder strBuilder = new StringBuilder(150); if (nodeInScopeCount != 0) { strBuilder.append("Scanning "); strBuilder.append(nodeInScopeCount); strBuilder.append(" node(s) "); } else { strBuilder.append("No nodes to scan "); } if (parentScanner.getJustScanInScope()) { strBuilder.append("[just in scope] "); } strBuilder.append("from ").append(hostAndPort); if (user != null) { strBuilder.append(" as "); strBuilder.append(user.getName()); } if (nodeInScopeCount == 0) { strBuilder.append(", skipping all plugins."); } log.info(strBuilder.toString()); } private void processPlugin(final Plugin plugin) { mapPluginStats.get(plugin.getId()).start(); if (nodeInScopeCount == 0) { pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.nonodes")); pluginCompleted(plugin); return; } else if (!plugin.targets(techSet)) { pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.techs")); pluginCompleted(plugin); return; } log.info("start host " + hostAndPort + " | " + plugin.getCodeName() + " strength " + plugin.getAttackStrength() + " threshold " + plugin.getAlertThreshold()); if (plugin instanceof AbstractHostPlugin) { checkPause(); if (isStop() || isSkipped(plugin) || !scanMessage(plugin, messageIdToHostScan)) { // Mark the plugin as completed if it was not run so the scan process can continue as expected. // The plugin might not be run, for example, if there was an error reading the message form DB. 
pluginCompleted(plugin); } } else if (plugin instanceof AbstractAppPlugin) { try { for (int messageId : messagesIdsToAppScan) { checkPause(); if (isStop() || isSkipped(plugin)) { return; } scanMessage(plugin, messageId); } threadPool.waitAllThreadComplete(600000); } finally { pluginCompleted(plugin); } } } private void traverse(StructuralNode node, boolean incRelatedSiblings, TraverseAction action) { if (node == null || isStop()) { return; } Set<StructuralNode> parentNodes = new HashSet<>(); parentNodes.add(node); action.apply(node); if (parentScanner.scanChildren()) { if (incRelatedSiblings) { // Also match siblings with the same hierarchic name // If we dont do this http://localhost/start might match the GET variant // in the Sites tree and miss the hierarchic node. // Note that this is only done for the top level try { Iterator<StructuralNode> iter = node.getParent().getChildIterator(); String nodeName = SessionStructure.getCleanRelativeName(node, false); while (iter.hasNext()) { StructuralNode sibling = iter.next(); if (! node.isSameAs(sibling) && nodeName.equals( SessionStructure.getCleanRelativeName(sibling, false))) { log.debug("traverse: including related sibling " + sibling.getName()); parentNodes.add(sibling); } } } catch (DatabaseException e) { // Ignore - if we cant connect to the db there will be plenty of other errors logged ;) } } for (StructuralNode pNode : parentNodes) { Iterator<StructuralNode> iter = pNode.getChildIterator(); while (iter.hasNext() && !isStop()) { checkPause(); try { traverse(iter.next(), false, action); } catch (Exception e) { log.error(e.getMessage(), e); } } } } } protected boolean nodeInScope(String nodeName) { return parentScanner.isInScope(nodeName); } /** * Scans the message with the given ID with the given plugin. * <p> * It's used a new instance of the given plugin. * * @param plugin the scanner * @param messageId the ID of the message. * @return {@code true} if the {@code plugin} was run, {@code false} otherwise. */ private boolean scanMessage(Plugin plugin, int messageId) { Plugin test; HistoryReference historyReference; HttpMessage msg; try { historyReference = new HistoryReference(messageId, true); msg = historyReference.getHttpMessage(); } catch (HttpMalformedHeaderException | DatabaseException e) { log.warn("Failed to read message with ID [" + messageId + "], cause: " + e.getMessage()); return false; } try { // Ensure the temporary nodes, added automatically to Sites tree, have a response. // The scanners might base the logic/attacks on the state of the response (e.g. status code). 
if (msg.getResponseHeader().isEmpty()) { msg = msg.cloneRequest(); if (!obtainResponse(historyReference, msg)) { return false; } } if (log.isDebugEnabled()) { log.debug("scanSingleNode node plugin=" + plugin.getName() + " node=" + historyReference.getURI().toString()); } test = plugin.getClass().getDeclaredConstructor().newInstance(); test.setConfig(plugin.getConfig()); if (this.ruleConfigParam != null) { // Set the configuration rules for (RuleConfig rc : this.ruleConfigParam.getAllRuleConfigs()) { test.getConfig().setProperty(rc.getKey(), rc.getValue()); } } test.setDelayInMs(plugin.getDelayInMs()); test.setDefaultAlertThreshold(plugin.getAlertThreshold()); test.setDefaultAttackStrength(plugin.getAttackStrength()); test.setTechSet(getTechSet()); test.init(msg, this); notifyHostProgress(plugin.getName() + ": " + msg.getRequestHeader().getURI().toString()); } catch (Exception e) { log.error(e.getMessage() + " " + historyReference.getURI().toString(), e); return false; } Thread thread; do { if (this.isStop()) { return false; } thread = threadPool.getFreeThreadAndRun(test); if (thread == null) { Util.sleep(200); } } while (thread == null); mapPluginStats.get(plugin.getId()).incProgress(); return true; } private boolean obtainResponse(HistoryReference hRef, HttpMessage message) { try { getHttpSender().sendAndReceive(message); notifyNewMessage(message); requestCount++; return true; } catch (IOException e) { log.warn( "Failed to obtain the HTTP response for href [id=" + hRef.getHistoryId() + ", type=" + hRef.getHistoryType() + ", URL=" + hRef.getURI() + "]: " + e.getMessage()); return false; } } /** * Tells whether or not the scanner can scan the given node. * <p> * A node must not be null, must contain a valid HistoryReference and be in scope. * * @param node the node to be checked * @return {@code true} if the node can be scanned, {@code false} otherwise. */ private boolean canScanNode(StructuralNode node) { if (node == null) { if (log.isDebugEnabled()) { log.debug("Ignoring null node"); } return false; } HistoryReference hRef = node.getHistoryReference(); if (hRef == null) { if (log.isDebugEnabled()) { log.debug("Ignoring null history reference for node: " + node.getName()); } return false; } if (HistoryReference.TYPE_SCANNER == hRef.getHistoryType()) { if (log.isDebugEnabled()) { log.debug("Ignoring \"scanner\" type href [id=" + hRef.getHistoryId() + ", URL=" + hRef.getURI() + "]"); } return false; } if (!nodeInScope(node.getName())) { if (log.isDebugEnabled()) { log.debug("Ignoring node not in scope: " + node.getName()); } return false; } return true; } /** * ZAP: method to get back the number of tests that need to be performed * @return the number of tests that need to be executed for this Scanner */ public int getTestTotalCount() { return nodeInScopeCount; } /** * ZAP: method to get back the current progress status of a specific plugin * @param plugin the plugin we're asking the progress * @return the current managed test count */ public int getTestCurrentCount(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { return 0; } return pluginStats.getProgress(); } /** * @deprecated (2.5.0) No longer used/needed, Plugin's progress is automatically updated/maintained by * {@code HostProcess}. * @param plugin unused * @param value unused */ @Deprecated public void setTestCurrentCount(Plugin plugin, int value) { // No longer used. } /** * @return Returns the httpSender. 
*/ public HttpSender getHttpSender() { return httpSender; } /** * Check if the current host scan has been stopped * @return true if the process has been stopped */ public boolean isStop() { if (this.scannerParam.getMaxScanDurationInMins() > 0) { if (System.currentTimeMillis() - this.hostProcessStartTime > TimeUnit.MINUTES.toMillis(this.scannerParam.getMaxScanDurationInMins())) { this.stopReason = Constant.messages.getString("ascan.progress.label.skipped.reason.maxScan"); this.stop(); } } return (isStop || parentScanner.isStop()); } /** * Check if the current host scan has been paused * @return true if the process has been paused */ public boolean isPaused() { return parentScanner.isPaused(); } private void checkPause() { while (parentScanner.isPaused() && !isStop()) { Util.sleep(500); } } public int getPercentageComplete () { return this.percentage; } private void notifyHostProgress(String msg) { if (pluginFactory.totalPluginToRun() == 0) { percentage = 100; } else { int numberRunning = 0; double progressRunning = 0; for (Plugin plugin : pluginFactory.getRunning()) { int scannedNodes = getTestCurrentCount(plugin); double pluginPercentage = (scannedNodes * 100.0) / getTestTotalCount(); if (pluginPercentage >= 100) { // More nodes are being scanned that the ones enumerated at the beginning... // Update global count and... nodeInScopeCount = scannedNodes; // make sure not return 100 (or more). pluginPercentage = 99; } progressRunning += pluginPercentage; numberRunning++; } int avgRunning = (int) (progressRunning / numberRunning); percentage = ((100 * pluginFactory.totalPluginCompleted()) + avgRunning) / pluginFactory.totalPluginToRun(); } parentScanner.notifyHostProgress(hostAndPort, msg, percentage); } private void notifyHostComplete() { long diffTimeMillis = System.currentTimeMillis() - hostProcessStartTime; String diffTimeString = decimalFormat.format(diffTimeMillis / 1000.0) + "s"; log.info("completed host " + hostAndPort + " in " + diffTimeString); parentScanner.notifyHostComplete(hostAndPort); } /** * Notifies interested parties that a new message was sent (and received). * <p> * {@link Plugin Plugins} should call {@link #notifyNewMessage(Plugin)} or {@link #notifyNewMessage(Plugin, HttpMessage)}, * instead. * * @param msg the new HTTP message * @since 1.2.0 */ public void notifyNewMessage(HttpMessage msg) { parentScanner.notifyNewMessage(msg); } /** * Notifies that the given {@code plugin} sent (and received) the given HTTP message. * * @param plugin the plugin that sent the message * @param message the message sent * @throws IllegalArgumentException if the given {@code plugin} is {@code null}. * @since 2.5.0 * @see #notifyNewMessage(Plugin) */ public void notifyNewMessage(Plugin plugin, HttpMessage message) { parentScanner.notifyNewMessage(message); notifyNewMessage(plugin); } /** * Notifies that the given {@code plugin} sent (and received) a non-HTTP message. * <p> * The call to this method has no effect if there's no {@code Plugin} with the given ID (or, it was not yet started). * * @param plugin the plugin that sent a non-HTTP message * @throws IllegalArgumentException if the given parameter is {@code null}. 
* @since 2.5.0 * @see #notifyNewMessage(Plugin, HttpMessage) */ public void notifyNewMessage(Plugin plugin) { if (plugin == null) { throw new IllegalArgumentException("Parameter plugin must not be null."); } PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats != null) { pluginStats.incMessageCount(); } } public void alertFound(Alert alert) { parentScanner.notifyAlertFound(alert); PluginStats pluginStats = mapPluginStats.get(alert.getPluginId()); if (pluginStats != null) { pluginStats.incAlertCount(); } alertCount++; } /** * Gets the alert count. * * @return the alert count. * @since 2.7.0 */ public int getAlertCount() { return alertCount; } /** * Give back the current process's Analyzer * @return the HTTP analyzer */ public Analyser getAnalyser() { if (analyser == null) { analyser = new Analyser(getHttpSender(), this); } return analyser; } /** * Gets the HTTP request configuration that ensures the followed redirections are in scan's scope. * * @return the HTTP request configuration, never {@code null}. * @since TODO add version * @see #getRedirectionValidator() */ HttpRequestConfig getRedirectRequestConfig() { if (redirectRequestConfig == null) { redirectRequestConfig = HttpRequestConfig.builder().setRedirectionValidator(getRedirectionValidator()).build(); } return redirectRequestConfig; } /** * Gets the redirection validator that ensures the followed redirections are in scan's scope. * * @return the redirection validator, never {@code null}. * @since TODO add version * @see #getRedirectRequestConfig() */ HttpRedirectionValidator getRedirectionValidator() { if (redirectionValidator == null) { redirectionValidator = redirection -> { if (!nodeInScope(redirection.getEscapedURI())) { if (log.isDebugEnabled()) { log.debug("Skipping redirection out of scan's scope: " + redirection); } return false; } return true; }; } return redirectionValidator; } public boolean handleAntiCsrfTokens() { return this.scannerParam.getHandleAntiCSRFTokens(); } /** * Skips the given plugin. * <p> * <strong>Note:</strong> Whenever possible callers should use {@link #pluginSkipped(Plugin, String)} instead. * * @param plugin the plugin that will be skipped, must not be {@code null} * @since 2.4.0 */ public void pluginSkipped(Plugin plugin) { pluginSkipped(plugin, null); } /** * Skips the plugin with the given ID with the given {@code reason}. * <p> * Ideally the {@code reason} should be internationalised as it is shown in the GUI. * * @param pluginId the ID of the plugin that will be skipped. * @param reason the reason why the plugin was skipped, might be {@code null}. * @since 2.7.0 * @see #pluginSkipped(Plugin, String) */ public void pluginSkipped(int pluginId, String reason) { Plugin plugin = pluginFactory.getPlugin(pluginId); if (plugin == null) { return; } pluginSkipped(plugin, reason); } /** * Skips the given {@code plugin} with the given {@code reason}. * <p> * Ideally the {@code reason} should be internationalised as it is shown in the GUI. 
* * @param plugin the plugin that will be skipped, must not be {@code null} * @param reason the reason why the plugin was skipped, might be {@code null} * @since 2.6.0 */ public void pluginSkipped(Plugin plugin, String reason) { if (isStop()) { return; } PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null || pluginStats.isSkipped() || pluginFactory.getCompleted().contains(plugin)) { return; } pluginStats.skip(); pluginStats.setSkippedReason(reason); for (Plugin dependent : pluginFactory.getDependentPlugins(plugin)) { pluginStats = mapPluginStats.get(dependent.getId()); if (pluginStats != null && !pluginStats.isSkipped() && !pluginFactory.getCompleted().contains(dependent)) { pluginStats.skip(); pluginStats.setSkippedReason( Constant.messages.getString( "ascan.progress.label.skipped.reason.dependency")); } } } /** * Tells whether or not the given {@code plugin} was skipped (either programmatically or by the user). * * @param plugin the plugin that will be checked * @return {@code true} if plugin was skipped, {@code false} otherwise * @since 2.4.0 * @see #getSkippedReason(Plugin) */ public boolean isSkipped(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats != null && pluginStats.isSkipped()) { return true; } if (plugin.getTimeFinished() == null && stopReason != null) { this.pluginSkipped(plugin, stopReason); return true; } else if (this.scannerParam.getMaxRuleDurationInMins() > 0 && plugin.getTimeStarted() != null) { long endtime = System.currentTimeMillis(); if (plugin.getTimeFinished() != null) { endtime = plugin.getTimeFinished().getTime(); } if (endtime - plugin.getTimeStarted().getTime() > TimeUnit.MINUTES.toMillis(this.scannerParam.getMaxRuleDurationInMins())) { this.pluginSkipped(plugin, Constant.messages.getString("ascan.progress.label.skipped.reason.maxRule")); return true; } } return false; } /** * Gets the reason why the given plugin was skipped. 
* * @param plugin the plugin that will be checked * @return the reason why the given plugin was skipped, might be {@code null} if not skipped or there's no reason * @since 2.6.0 * @see #isSkipped(Plugin) */ public String getSkippedReason(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { return stopReason; } return pluginStats.getSkippedReason(); } /** * Complete the current plugin and update statistics * @param plugin the plugin that need to be marked as completed */ void pluginCompleted(Plugin plugin) { PluginStats pluginStats = mapPluginStats.get(plugin.getId()); if (pluginStats == null) { // Plugin was not processed return; } StringBuilder sb = new StringBuilder(); if (isStop()) { sb.append("stopped host/plugin "); // ZAP: added skipping notifications } else if (pluginStats.isSkipped()) { sb.append("skipped plugin "); String reason = pluginStats.getSkippedReason(); if (reason != null) { sb.append('[').append(reason).append("] "); } } else { sb.append("completed host/plugin "); } sb.append(hostAndPort).append(" | ").append(plugin.getCodeName()); long startTimeMillis = pluginStats.getStartTime(); long diffTimeMillis = System.currentTimeMillis() - startTimeMillis; String diffTimeString = decimalFormat.format(diffTimeMillis / 1000.0); sb.append(" in ").append(diffTimeString).append('s'); sb.append(" with ").append(pluginStats.getMessageCount()).append(" message(s) sent"); sb.append(" and ").append(pluginStats.getAlertCount()).append(" alert(s) raised."); // Probably too verbose evaluate 4 the future log.info(sb.toString()); pluginFactory.setRunningPluginCompleted(plugin); notifyHostProgress(null); // ZAP: update progress as finished pluginStats.setProgress(nodeInScopeCount); } /** * Gets the knowledge base of the current scan. * * @return the knowledge base of the current scan, never {@code null}. */ Kb getKb() { if (kb == null) { kb = new Kb(); } return kb; } protected ScannerParam getScannerParam() { return scannerParam; } public List<Plugin> getPending() { return this.pluginFactory.getPending(); } public List<Plugin> getRunning() { return this.pluginFactory.getRunning(); } public List<Plugin> getCompleted() { return this.pluginFactory.getCompleted(); } /** * Set the user to scan as. If null then the current session will be used. * @param user the user to scan as */ public void setUser(User user) { this.user = user; if (httpSender != null) { httpSender.setUser(user); } } /** * Gets the technologies to be used in the scan. * * @return the technologies, never {@code null} (since 2.6.0) * @since 2.4.0 */ public TechSet getTechSet() { return techSet; } /** * Sets the technologies to be used in the scan. * * @param techSet the technologies to be used during the scan * @since 2.4.0 * @throws IllegalArgumentException (since 2.6.0) if the given parameter is {@code null}. 
*/ public void setTechSet(TechSet techSet) { if (techSet == null) { throw new IllegalArgumentException("Parameter techSet must not be null."); } this.techSet = techSet; } /** * ZAP: abstract plugin will call this method in order to invoke any extensions that have hooked into the active scanner * @param msg the message being scanned * @param plugin the plugin being run */ protected synchronized void performScannerHookBeforeScan(HttpMessage msg, AbstractPlugin plugin) { Iterator<ScannerHook> iter = this.parentScanner.getScannerHooks().iterator(); while(iter.hasNext()){ ScannerHook hook = iter.next(); if(hook != null) { try { hook.beforeScan(msg, plugin, this.parentScanner); } catch (Exception e) { log.info("An exception occurred while trying to call beforeScan(msg, plugin) for one of the ScannerHooks: " + e.getMessage(), e); } } } } /** * ZAP: abstract plugin will call this method in order to invoke any extensions that have hooked into the active scanner * @param msg the message being scanned * @param plugin the plugin being run */ protected synchronized void performScannerHookAfterScan(HttpMessage msg, AbstractPlugin plugin) { Iterator<ScannerHook> iter = this.parentScanner.getScannerHooks().iterator(); while(iter.hasNext()){ ScannerHook hook = iter.next(); if(hook != null) { try { hook.afterScan(msg, plugin, this.parentScanner); } catch (Exception e) { log.info("An exception occurred while trying to call afterScan(msg, plugin) for one of the ScannerHooks: " + e.getMessage(), e); } } } } public String getHostAndPort() { return this.hostAndPort; } /** * @deprecated (2.5.0) No longer used/needed, Plugin's request count is automatically updated/maintained by * {@code HostProcess}. * @param pluginId the ID of the plugin * @param reqCount the number of requests sent */ @Deprecated public void setPluginRequestCount(int pluginId, int reqCount) { // No longer used. } /** * Gets the request count of the plugin with the given ID. * * @param pluginId the ID of the plugin * @return the request count * @since 2.4.3 * @see #getRequestCount() */ public int getPluginRequestCount(int pluginId) { PluginStats pluginStats = mapPluginStats.get(pluginId); if (pluginStats != null) { return pluginStats.getMessageCount(); } return 0; } /** * Gets the count of requests sent (and received) by all {@code Plugin}s and the {@code Analyser}. * * @return the count of request sent * @since 2.5.0 * @see #getPluginRequestCount(int) * @see #getAnalyser() */ public int getRequestCount() { synchronized (mapPluginStats) { int count = requestCount + getAnalyser().getRequestCount(); for (PluginStats stats : mapPluginStats.values()) { count += stats.getMessageCount(); } return count; } } /** * Gets the stats of the {@code Plugin} with the given ID. * * @param pluginId the ID of the plugin. * @return the stats of the plugin, or {@code null} if not found. * @since 2.7.0 */ public PluginStats getPluginStats(int pluginId) { synchronized (mapPluginStats) { return mapPluginStats.get(pluginId); } } /** * An action to be executed for each node traversed during the scan. * * @see #apply(StructuralNode) */ @FunctionalInterface private interface TraverseAction { /** * Applies an action to the node traversed. * * @param node the node being traversed */ void apply(StructuralNode node); } }
Log alert count when host process completes

Change HostProcess to also log the alert count when it completes, making it easier to know how many alerts were raised during the scan (without needing to check each of the scanners).
src/org/parosproxy/paros/core/scanner/HostProcess.java
Log alert count when host process completes
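The change described above extends HostProcess so that per-plugin alert counts are tracked (via alertFound and the plugin stats) and reported in the completion log line. The stand-alone sketch below illustrates only that counting-and-summarising pattern; the class and method names are hypothetical simplifications, not the actual ZAP classes.

// Minimal sketch (hypothetical names): per-plugin counters whose totals are
// included in a completion summary line, mirroring the idea of the change above.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ScanStatsSketch {

    // Per-plugin alert counters, keyed by plugin ID.
    private final Map<Integer, Integer> alertCounts = new ConcurrentHashMap<>();
    private int totalAlerts;

    // Called whenever a plugin raises an alert.
    public synchronized void alertFound(int pluginId) {
        alertCounts.merge(pluginId, 1, Integer::sum);
        totalAlerts++;
    }

    // Called when a plugin completes; builds a summary similar in spirit to
    // the one logged on plugin completion in the code above.
    public synchronized String completionSummary(int pluginId, String pluginName, long millis) {
        int alerts = alertCounts.getOrDefault(pluginId, 0);
        return String.format("completed plugin %s in %.3fs with %d alert(s) raised.",
                pluginName, millis / 1000.0, alerts);
    }

    public synchronized int getTotalAlertCount() {
        return totalAlerts;
    }

    public static void main(String[] args) {
        ScanStatsSketch stats = new ScanStatsSketch();
        stats.alertFound(40012);
        stats.alertFound(40012);
        System.out.println(stats.completionSummary(40012, "Example Rule", 1234));
        System.out.println("total alerts raised: " + stats.getTotalAlertCount());
    }
}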
Java
apache-2.0
18dcddf0dfe8cb2baf9126c4c38e3a29c2369815
0
OmniLayer/OmniJ,dexX7/OmniJ,dexX7/OmniJ,OmniLayer/OmniJ,dexX7/bitcoin-spock,OmniLayer/OmniJ,dexX7/bitcoin-spock
package com.msgilligan.bitcoin.rpc; import com.google.bitcoin.core.Address; import com.google.bitcoin.core.AddressFormatException; import com.google.bitcoin.core.NetworkParameters; import com.google.bitcoin.core.Sha256Hash; import com.google.bitcoin.core.Transaction; import com.google.bitcoin.params.RegTestParams; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.net.SocketException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; import java.util.List; import java.util.Map; /** * JSON-RPC Client for bitcoind */ public class BitcoinClient extends RPCClient { private static final Integer SECOND = 1000; public BitcoinClient(URL server, String rpcuser, String rpcpassword) { super(server, rpcuser, rpcpassword); } public BitcoinClient(RPCConfig config) throws IOException { this(config.getUrl(), config.getUsername(), config.getPassword()); } /** * * @param timeout Timeout in seconds * @return */ public Boolean waitForServer(Integer timeout) { Integer seconds = 0; System.out.println("Waiting for server RPC ready..."); Integer block; while ( seconds < timeout ) { try { block = this.getBlockCount(); if (block != null ) { System.out.println("\nRPC Ready."); return true; } } catch (SocketException se ) { // These are expected exceptions while waiting for a server if (! ( se.getMessage().equals("Unexpected end of file from server") || se.getMessage().equals("Connection reset") || se.getMessage().equals("Connection refused") || se.getMessage().equals("recvfrom failed: ECONNRESET (Connection reset by peer)"))) { se.printStackTrace(); } } catch (java.io.EOFException e) { /* Android exception, ignore */ // Expected exceptions on Android, RoboVM } catch (JsonRPCException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } try { System.out.print("."); seconds++; if (seconds % 60 == 0) { System.out.println(); } Thread.sleep(SECOND); } catch (InterruptedException e) { e.printStackTrace(); } } return false; } /** * * @param timeout Timeout in seconds * @return */ public Boolean waitForSync(Long blockCount, Integer timeout) throws JsonRPCException, IOException { Integer seconds = 0; System.out.println("Waiting for server to get to block " + blockCount); Integer block; while ( seconds < timeout ) { block = this.getBlockCount(); if (block >= blockCount ) { System.out.println("Server is at block " + block + " returning 'true'."); return true; } else { try { seconds++; if (seconds % 60 == 0) { System.out.println("Server at block " + block); } Thread.sleep(SECOND); } catch (InterruptedException e) { e.printStackTrace(); } } } return false; } /** * * @return * @throws IOException */ public Integer getBlockCount() throws JsonRPCException, IOException { Map<String, Object> response = send("getblockcount", null); Integer blockCount = (Integer) response.get("result"); return blockCount; } /** * * @param generate turn generation on or off * @param genproclimit Generation is limited to [genproclimit] processors, -1 is unlimited * in regtest mode genproclimit is number of blocks to generate immediately * @throws IOException */ public void setGenerate(Boolean generate, Long genproclimit) throws JsonRPCException, IOException { List<Object> params = createParamList(generate, genproclimit); Map<String, Object> response = send("setgenerate", params); String result = (String) response.get("result"); assert result == null; } public void generateBlock() throws JsonRPCException, IOException { 
generateBlocks(1L); } public void generateBlocks(Long blocks) throws JsonRPCException, IOException { setGenerate(true, blocks); } public Address getNewAddress() throws JsonRPCException, IOException { return getNewAddress(null); } public Address getNewAddress(String account) throws JsonRPCException, IOException { List<Object> params = createParamList(account); Map<String, Object> response = send("getnewaddress", null); String addr = (String) response.get("result"); Address address = null; try { address = new Address(null, addr); } catch (AddressFormatException e) { throw new RuntimeException(e); } return address; } public Address getAccountAddress(String account) throws JsonRPCException, IOException { List<Object> params = createParamList(account); Map<String, Object> response = send("getaccountaddress", params); @SuppressWarnings("unchecked") String addr = (String) response.get("result"); Address address = null; try { address = new Address(null, addr); } catch (AddressFormatException e) { throw new RuntimeException(e); } return address; } public Boolean moveFunds(Address fromaccount, Address toaccount, BigDecimal amount, Integer minconf, String comment) throws JsonRPCException, IOException { List<Object> params = createParamList(fromaccount, toaccount, amount, minconf, comment); Map<String, Object> response = send("move", params); @SuppressWarnings("unchecked") Boolean result = (Boolean) response.get("result"); return result; } /** * Signs inputs of a raw transaction. * * @param unsignedTransaction The hex-encoded raw transaction * @return The signed transaction and information whether it has a complete set of signature * @throws IOException * @throws JsonRPCException */ public Map<String, Object> signRawTransaction(String unsignedTransaction) throws IOException, JsonRPCException { List<Object> params = createParamList(unsignedTransaction); Map<String, Object> response = send("signrawtransaction", params); @SuppressWarnings("unchecked") Map<String, Object> signedTransaction = (Map<String, Object>) response.get("result"); return signedTransaction; } public Object getRawTransaction(Sha256Hash txid, Boolean verbose) throws JsonRPCException, IOException { Object result; if (verbose) { result = getRawTransactionMap(txid); // Verbose means JSON } else { result = getRawTransactionBytes(txid); // Not-verbose is Binary } return result; } /* Return a BitcoinJ Transaction type */ public Transaction getRawTransaction(Sha256Hash txid) throws JsonRPCException, IOException { byte[] raw = getRawTransactionBytes(txid); // Hard-code RegTest for now // TODO: All RPC client connections should have a BitcoinJ params object? Transaction tx = new Transaction(RegTestParams.get(), raw); return tx; } public byte[] getRawTransactionBytes(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString()); Map<String, Object> response = send("getrawtransaction", params); @SuppressWarnings("unchecked") String hexEncoded = (String) response.get("result"); byte[] raw = BitcoinClient.hexStringToByteArray(hexEncoded); return raw; } /* TODO: Return a stronger type than an a Map? 
*/ public Map<String, Object> getRawTransactionMap(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString(), 1); Map<String, Object> response = send("getrawtransaction", params); @SuppressWarnings("unchecked") Map<String, Object> json = (Map<String, Object>) response.get("result"); return json; } public Sha256Hash sendRawTransaction(Transaction tx) throws JsonRPCException, IOException { return sendRawTransaction(tx, null); } public Sha256Hash sendRawTransaction(Transaction tx, Boolean allowHighFees) throws JsonRPCException, IOException { String hexTx = transactionToHex(tx); List<Object> params = createParamList(hexTx, allowHighFees); Map<String, Object> response = send("sendrawtransaction", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public BigDecimal getReceivedByAddress(Address address) throws JsonRPCException, IOException { return getReceivedByAddress(address, 1); // Default to 1 or more confirmations } public BigDecimal getReceivedByAddress(Address address, Integer minConf) throws JsonRPCException, IOException { List<Object> params = createParamList(address.toString(), minConf); Map<String, Object> response = send("getreceivedbyaddress", params); BigDecimal balance = BigDecimal.valueOf((Double) response.get("result")); return balance; } public List<Object> listReceivedByAddress(Integer minConf, Boolean includeEmpty ) throws JsonRPCException, IOException { List<Object> params = createParamList(minConf, includeEmpty); Map<String, Object> response = send("listreceivedbyaddress", params); @SuppressWarnings("unchecked") List<Object> addresses = (List<Object>) response.get("result"); return addresses; } /** * Returns a list of unspent transaction outputs with at least one confirmation. * * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent() throws JsonRPCException, IOException { return listUnspent(null, null, null); } /** * Returns a list of unspent transaction outputs with at least {@code minConf} and not more than {@code maxConf} * confirmations. * * @param minConf The minimum confirmations to filter * @param maxConf The maximum confirmations to filter * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent(Integer minConf, Integer maxConf) throws JsonRPCException, IOException { return listUnspent(minConf, maxConf, null); } /** * Returns a list of unspent transaction outputs with at least {@code minConf} and not more than {@code maxConf} * confirmations, filtered by a list of addresses. 
* * @param minConf The minimum confirmations to filter * @param maxConf The maximum confirmations to filter * @param filter Include only transaction outputs to the specified addresses * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent(Integer minConf, Integer maxConf, Iterable<Address> filter) throws JsonRPCException, IOException { List<String> addressFilter = null; if (null != filter) addressFilter = applyToString(filter); List<Object> params = createParamList(minConf, maxConf, addressFilter); Map<String, Object> response = send("listunspent", params); @SuppressWarnings("unchecked") List<Map<String, Object>> unspent = (List<Map<String, Object>>) response.get("result"); return unspent; } public BigDecimal getBalance() throws JsonRPCException, IOException { return getBalance(null, null); } public BigDecimal getBalance(String account) throws JsonRPCException, IOException { return getBalance(account, null); } public BigDecimal getBalance(String account, Integer minConf) throws JsonRPCException, IOException { List<Object> params = createParamList(account, minConf); Map<String, Object> response = send("getbalance", params); Double balanceBTCd = (Double) response.get("result"); // Beware of the new BigDecimal(double d) constructor, it results in unexpected/undesired values. BigDecimal balanceBTC = BigDecimal.valueOf(balanceBTCd); return balanceBTC; } public Sha256Hash sendToAddress(Address address, BigDecimal amount) throws JsonRPCException, IOException { return sendToAddress(address, amount, null, null); } public Sha256Hash sendToAddress(Address address, BigDecimal amount, String comment, String commentTo) throws JsonRPCException, IOException { List<Object> params = createParamList(address.toString(), amount, comment, commentTo); Map<String, Object> response = send("sendtoaddress", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Sha256Hash sendFrom(String account, Address address, BigDecimal amount) throws JsonRPCException, IOException { List<Object> params = createParamList(account, address.toString(), amount); Map<String, Object> response = send("sendfrom", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Sha256Hash sendMany(String account, Map<Address, BigDecimal> amounts) throws JsonRPCException, IOException { List<Object> params = Arrays.asList(account, amounts); Map<String, Object> response = send("sendmany", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Map<String, Object> getTransaction(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString()); Map<String, Object> response = send("gettransaction", params); @SuppressWarnings("unchecked") Map<String, Object> transaction = (Map<String, Object>) response.get("result"); return transaction; } public Map<String, Object> getInfo() throws JsonRPCException, IOException { Map<String, Object> response = send("getinfo", null); @SuppressWarnings("unchecked") Map<String, Object> result = (Map<String, Object>) response.get("result"); return result; } public static byte[] hexStringToByteArray(String s) { int len = s.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i+1), 16)); } return data; } 
/* * Create a mutable param list (so send() can remove null parameters) */ private List<Object> createParamList(Object... parameters) { List<Object> paramList = new ArrayList<Object>(Arrays.asList(parameters)); return paramList; } private String transactionToHex(Transaction tx) { // From: http://bitcoin.stackexchange.com/questions/8475/how-to-get-hex-string-from-transaction-in-bitcoinj final StringBuilder sb = new StringBuilder(); Formatter formatter = new Formatter(sb); try { ByteArrayOutputStream os = new ByteArrayOutputStream(); tx.bitcoinSerialize(os); byte[] bytes = os.toByteArray(); for (byte b : bytes) { formatter.format("%02x", b); } } catch (IOException e) { throw new RuntimeException("Can't convert Transaction to Hex String", e); } finally { formatter.close(); } return sb.toString(); } /** * Applies toString() to every element of {@code elements} and returns a list of the results. * * @param elements The elements * @return The list of strings */ private <T> List<String> applyToString(Iterable<T> elements) { List<String> stringList = new ArrayList<>(); for (T element : elements) { String elementAsString = element.toString(); stringList.add(elementAsString); } return stringList; } }
src/main/java/com/msgilligan/bitcoin/rpc/BitcoinClient.java
package com.msgilligan.bitcoin.rpc; import com.google.bitcoin.core.Address; import com.google.bitcoin.core.AddressFormatException; import com.google.bitcoin.core.NetworkParameters; import com.google.bitcoin.core.Sha256Hash; import com.google.bitcoin.core.Transaction; import com.google.bitcoin.params.RegTestParams; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.net.SocketException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.Formatter; import java.util.List; import java.util.Map; /** * JSON-RPC Client for bitcoind */ public class BitcoinClient extends RPCClient { private static final Integer SECOND = 1000; public BitcoinClient(URL server, String rpcuser, String rpcpassword) { super(server, rpcuser, rpcpassword); } public BitcoinClient(RPCConfig config) throws IOException { this(config.getUrl(), config.getUsername(), config.getPassword()); } /** * * @param timeout Timeout in seconds * @return */ public Boolean waitForServer(Integer timeout) { Integer seconds = 0; System.out.println("Waiting for server RPC ready..."); Integer block; while ( seconds < timeout ) { try { block = this.getBlockCount(); if (block != null ) { System.out.println("\nRPC Ready."); return true; } } catch (SocketException se ) { // These are expected exceptions while waiting for a server if (! ( se.getMessage().equals("Unexpected end of file from server") || se.getMessage().equals("Connection reset") || se.getMessage().equals("Connection refused") || se.getMessage().equals("recvfrom failed: ECONNRESET (Connection reset by peer)"))) { se.printStackTrace(); } } catch (java.io.EOFException e) { /* Android exception, ignore */ // Expected exceptions on Android, RoboVM } catch (JsonRPCException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } try { System.out.print("."); seconds++; if (seconds % 60 == 0) { System.out.println(); } Thread.sleep(SECOND); } catch (InterruptedException e) { e.printStackTrace(); } } return false; } /** * * @param timeout Timeout in seconds * @return */ public Boolean waitForSync(Long blockCount, Integer timeout) throws JsonRPCException, IOException { Integer seconds = 0; System.out.println("Waiting for server to get to block " + blockCount); Integer block; while ( seconds < timeout ) { block = this.getBlockCount(); if (block >= blockCount ) { System.out.println("Server is at block " + block + " returning 'true'."); return true; } else { try { seconds++; if (seconds % 60 == 0) { System.out.println("Server at block " + block); } Thread.sleep(SECOND); } catch (InterruptedException e) { e.printStackTrace(); } } } return false; } /** * * @return * @throws IOException */ public Integer getBlockCount() throws JsonRPCException, IOException { Map<String, Object> response = send("getblockcount", null); Integer blockCount = (Integer) response.get("result"); return blockCount; } /** * * @param generate turn generation on or off * @param genproclimit Generation is limited to [genproclimit] processors, -1 is unlimited * in regtest mode genproclimit is number of blocks to generate immediately * @throws IOException */ public void setGenerate(Boolean generate, Long genproclimit) throws JsonRPCException, IOException { List<Object> params = createParamList(generate, genproclimit); Map<String, Object> response = send("setgenerate", params); String result = (String) response.get("result"); assert result == null; } public void generateBlock() throws JsonRPCException, IOException { 
generateBlocks(1L); } public void generateBlocks(Long blocks) throws JsonRPCException, IOException { setGenerate(true, blocks); } public Address getNewAddress() throws JsonRPCException, IOException { return getNewAddress(null); } public Address getNewAddress(String account) throws JsonRPCException, IOException { List<Object> params = createParamList(account); Map<String, Object> response = send("getnewaddress", null); String addr = (String) response.get("result"); Address address = null; try { address = new Address(null, addr); } catch (AddressFormatException e) { throw new RuntimeException(e); } return address; } public Address getAccountAddress(String account) throws JsonRPCException, IOException { List<Object> params = createParamList(account); Map<String, Object> response = send("getaccountaddress", params); @SuppressWarnings("unchecked") String addr = (String) response.get("result"); Address address = null; try { address = new Address(null, addr); } catch (AddressFormatException e) { throw new RuntimeException(e); } return address; } public Boolean moveFunds(Address fromaccount, Address toaccount, BigDecimal amount, Integer minconf, String comment) throws JsonRPCException, IOException { List<Object> params = createParamList(fromaccount, toaccount, amount, minconf, comment); Map<String, Object> response = send("move", params); @SuppressWarnings("unchecked") Boolean result = (Boolean) response.get("result"); return result; } public Object getRawTransaction(Sha256Hash txid, Boolean verbose) throws JsonRPCException, IOException { Object result; if (verbose) { result = getRawTransactionMap(txid); // Verbose means JSON } else { result = getRawTransactionBytes(txid); // Not-verbose is Binary } return result; } /* Return a BitcoinJ Transaction type */ public Transaction getRawTransaction(Sha256Hash txid) throws JsonRPCException, IOException { byte[] raw = getRawTransactionBytes(txid); // Hard-code RegTest for now // TODO: All RPC client connections should have a BitcoinJ params object? Transaction tx = new Transaction(RegTestParams.get(), raw); return tx; } public byte[] getRawTransactionBytes(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString()); Map<String, Object> response = send("getrawtransaction", params); @SuppressWarnings("unchecked") String hexEncoded = (String) response.get("result"); byte[] raw = BitcoinClient.hexStringToByteArray(hexEncoded); return raw; } /* TODO: Return a stronger type than an a Map? 
*/ public Map<String, Object> getRawTransactionMap(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString(), 1); Map<String, Object> response = send("getrawtransaction", params); @SuppressWarnings("unchecked") Map<String, Object> json = (Map<String, Object>) response.get("result"); return json; } public Sha256Hash sendRawTransaction(Transaction tx) throws JsonRPCException, IOException { return sendRawTransaction(tx, null); } public Sha256Hash sendRawTransaction(Transaction tx, Boolean allowHighFees) throws JsonRPCException, IOException { String hexTx = transactionToHex(tx); List<Object> params = createParamList(hexTx, allowHighFees); Map<String, Object> response = send("sendrawtransaction", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public BigDecimal getReceivedByAddress(Address address) throws JsonRPCException, IOException { return getReceivedByAddress(address, 1); // Default to 1 or more confirmations } public BigDecimal getReceivedByAddress(Address address, Integer minConf) throws JsonRPCException, IOException { List<Object> params = createParamList(address.toString(), minConf); Map<String, Object> response = send("getreceivedbyaddress", params); BigDecimal balance = BigDecimal.valueOf((Double) response.get("result")); return balance; } public List<Object> listReceivedByAddress(Integer minConf, Boolean includeEmpty ) throws JsonRPCException, IOException { List<Object> params = createParamList(minConf, includeEmpty); Map<String, Object> response = send("listreceivedbyaddress", params); @SuppressWarnings("unchecked") List<Object> addresses = (List<Object>) response.get("result"); return addresses; } /** * Returns a list of unspent transaction outputs with at least one confirmation. * * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent() throws JsonRPCException, IOException { return listUnspent(null, null, null); } /** * Returns a list of unspent transaction outputs with at least {@code minConf} and not more than {@code maxConf} * confirmations. * * @param minConf The minimum confirmations to filter * @param maxConf The maximum confirmations to filter * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent(Integer minConf, Integer maxConf) throws JsonRPCException, IOException { return listUnspent(minConf, maxConf, null); } /** * Returns a list of unspent transaction outputs with at least {@code minConf} and not more than {@code maxConf} * confirmations, filtered by a list of addresses. 
* * @param minConf The minimum confirmations to filter * @param maxConf The maximum confirmations to filter * @param filter Include only transaction outputs to the specified addresses * @return The unspent transaction outputs * @throws JsonRPCException * @throws IOException */ public List<Map<String, Object>> listUnspent(Integer minConf, Integer maxConf, Iterable<Address> filter) throws JsonRPCException, IOException { List<String> addressFilter = null; if (null != filter) addressFilter = applyToString(filter); List<Object> params = createParamList(minConf, maxConf, addressFilter); Map<String, Object> response = send("listunspent", params); @SuppressWarnings("unchecked") List<Map<String, Object>> unspent = (List<Map<String, Object>>) response.get("result"); return unspent; } public BigDecimal getBalance() throws JsonRPCException, IOException { return getBalance(null, null); } public BigDecimal getBalance(String account) throws JsonRPCException, IOException { return getBalance(account, null); } public BigDecimal getBalance(String account, Integer minConf) throws JsonRPCException, IOException { List<Object> params = createParamList(account, minConf); Map<String, Object> response = send("getbalance", params); Double balanceBTCd = (Double) response.get("result"); // Beware of the new BigDecimal(double d) constructor, it results in unexpected/undesired values. BigDecimal balanceBTC = BigDecimal.valueOf(balanceBTCd); return balanceBTC; } public Sha256Hash sendToAddress(Address address, BigDecimal amount) throws JsonRPCException, IOException { return sendToAddress(address, amount, null, null); } public Sha256Hash sendToAddress(Address address, BigDecimal amount, String comment, String commentTo) throws JsonRPCException, IOException { List<Object> params = createParamList(address.toString(), amount, comment, commentTo); Map<String, Object> response = send("sendtoaddress", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Sha256Hash sendFrom(String account, Address address, BigDecimal amount) throws JsonRPCException, IOException { List<Object> params = createParamList(account, address.toString(), amount); Map<String, Object> response = send("sendfrom", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Sha256Hash sendMany(String account, Map<Address, BigDecimal> amounts) throws JsonRPCException, IOException { List<Object> params = Arrays.asList(account, amounts); Map<String, Object> response = send("sendmany", params); String txid = (String) response.get("result"); Sha256Hash hash = new Sha256Hash(txid); return hash; } public Map<String, Object> getTransaction(Sha256Hash txid) throws JsonRPCException, IOException { List<Object> params = createParamList(txid.toString()); Map<String, Object> response = send("gettransaction", params); @SuppressWarnings("unchecked") Map<String, Object> transaction = (Map<String, Object>) response.get("result"); return transaction; } public Map<String, Object> getInfo() throws JsonRPCException, IOException { Map<String, Object> response = send("getinfo", null); @SuppressWarnings("unchecked") Map<String, Object> result = (Map<String, Object>) response.get("result"); return result; } public static byte[] hexStringToByteArray(String s) { int len = s.length(); byte[] data = new byte[len / 2]; for (int i = 0; i < len; i += 2) { data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4) + Character.digit(s.charAt(i+1), 16)); } return data; } 
/* * Create a mutable param list (so send() can remove null parameters) */ private List<Object> createParamList(Object... parameters) { List<Object> paramList = new ArrayList<Object>(Arrays.asList(parameters)); return paramList; } private String transactionToHex(Transaction tx) { // From: http://bitcoin.stackexchange.com/questions/8475/how-to-get-hex-string-from-transaction-in-bitcoinj final StringBuilder sb = new StringBuilder(); Formatter formatter = new Formatter(sb); try { ByteArrayOutputStream os = new ByteArrayOutputStream(); tx.bitcoinSerialize(os); byte[] bytes = os.toByteArray(); for (byte b : bytes) { formatter.format("%02x", b); } } catch (IOException e) { throw new RuntimeException("Can't convert Transaction to Hex String", e); } finally { formatter.close(); } return sb.toString(); } /** * Applies toString() to every element of {@code elements} and returns a list of the results. * * @param elements The elements * @return The list of strings */ private <T> List<String> applyToString(Iterable<T> elements) { List<String> stringList = new ArrayList<>(); for (T element : elements) { String elementAsString = element.toString(); stringList.add(elementAsString); } return stringList; } }
signRawTransaction: add basic variant without optional parameters
src/main/java/com/msgilligan/bitcoin/rpc/BitcoinClient.java
signRawTransaction: add basic variant without optional parameters
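The commit above adds a signRawTransaction(String) wrapper to BitcoinClient that issues bitcoind's signrawtransaction RPC without any of the optional arguments. A minimal usage sketch follows; the URL, credentials and transaction hex are placeholders, and the "hex"/"complete" result keys are assumed from the standard bitcoind JSON-RPC response rather than taken from this file.

// Hypothetical usage of the newly added signRawTransaction(..) wrapper.
import com.msgilligan.bitcoin.rpc.BitcoinClient;

import java.net.URL;
import java.util.Map;

public class SignRawTransactionExample {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint and credentials for a local regtest/testnet node.
        BitcoinClient client = new BitcoinClient(
                new URL("http://localhost:18332"), "rpcuser", "rpcpassword");

        // An unsigned raw transaction created elsewhere (e.g. via createrawtransaction).
        String unsignedHex = "0100000001..."; // placeholder hex, not a real transaction

        // Maps onto bitcoind's "signrawtransaction" RPC without the optional
        // prevtxs/privkeys/sighashtype parameters.
        Map<String, Object> result = client.signRawTransaction(unsignedHex);

        // Assumed response fields of the signrawtransaction RPC.
        String signedHex = (String) result.get("hex");
        Boolean complete = (Boolean) result.get("complete");

        System.out.println("signed: " + signedHex + ", complete: " + complete);
    }
}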
Java
apache-2.0
d1ece9cba0c5eaee6716f40df130d4a709d54470
0
mcqueentc/chronix.benchmark,ChronixDB/chronix.benchmark
import de.qaware.chronix.database.*; import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.time.temporal.TemporalUnit; import java.util.*; import java.util.concurrent.TimeUnit; /** * Created by mcqueen666 on 06.09.16. */ public class OpenTsdbInterface implements BenchmarkDataSource { private final String OPENTSDB_STORAGE_DIRECTORY = "/opt/data"; private final int OPENTSDB_NUMBER_OF_POINTS_PER_BATCH = 10; private String ipAddress; private int portNumber; private boolean isSetup = false; private String dbName = "chronixBenchmark"; OpenTsdb openTsdb; @Override public boolean setup(String ipAddress, int portNumber) { if(!isSetup){ try { openTsdb = new OpenTsdb.Builder("http://" + ipAddress + ":" + portNumber).create(); //TODO wait until openTsdb is up and ready // openTsdb.setBatchSizeLimit(NUMBER_OF_POINTS_PER_BATCH); // openTsdb cannot handle many points at once (o.0) openTsdb.setBatchSizeLimit(OPENTSDB_NUMBER_OF_POINTS_PER_BATCH); this.ipAddress = ipAddress; this.portNumber = portNumber; } catch (Exception e){ isSetup = false; System.err.println("Error OpenTSDB setup: " + e.getLocalizedMessage()); } } return isSetup; } @Override public boolean clean() { return false; } @Override public void shutdown() { } @Override public String getStorageDirectoryPath() { return OPENTSDB_STORAGE_DIRECTORY; } @Override public String importDataPoints(TimeSeries timeSeries) { String reply = "Error importing data points to openTsdb."; if(timeSeries != null) { Map<String, String> tags = timeSeries.getTagKey_tagValue(); openTsdb.preAssignDimensions(tags.keySet()); // create escapted metricName String metricName = openTSDBEscapeValue(timeSeries.getMetricName()); // create escapted tags Map<String, String> metaData = new HashMap<>(); for(Map.Entry<String, String> tag : tags.entrySet()){ metaData.put(tag.getKey(), openTSDBEscapeValue(tag.getValue())); } Set<OpenTsdbMetric> openTsdbMetricSet = new HashSet<>(); for(TimeSeriesPoint point : timeSeries.getPoints()){ OpenTsdbMetric openTsdbMetric = OpenTsdbMetric.named(metricName) .withTags(metaData) .withTimestamp(point.getTimeStamp()) // TODO maybe wrong time unit .withValue(point.getValue()) .build(); openTsdbMetricSet.add(openTsdbMetric); } try { //send uses given batch point size if(openTsdb.send(openTsdbMetricSet)){ reply = "Import of " + openTsdbMetricSet.size() + " points successful. 
Metric name: " + metricName; } } catch (Exception e){ reply = "Error importing data points to openTsdb: " + e.getLocalizedMessage(); } } return reply; } @Override public Object getQueryObject(BenchmarkQuery benchmarkQuery) { TimeSeriesMetaData timeSeriesMetaData = benchmarkQuery.getTimeSeriesMetaData(); QueryFunction function = benchmarkQuery.getFunction(); Map<String, String> tags = timeSeriesMetaData.getTagKey_tagValue(); String escapedMetricName = openTSDBEscapeValue(timeSeriesMetaData.getMetricName()); StringBuilder tagString = new StringBuilder(); tagString.append("{"); for(Map.Entry<String, String> tag : tags.entrySet()){ tagString.append(tag.getKey() + "=").append(openTSDBEscapeValue(tag.getValue())).append(","); } tagString.deleteCharAt(tagString.length()-1); tagString.append("}"); // millisecond precision String startDate = openTsdbTimeString(timeSeriesMetaData.getStart()); String endDate = openTsdbTimeString(timeSeriesMetaData.getEnd()); /* // Downsampling long timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toDays(); String aggregatedTimeSpan = timespan + "d"; //aggregatedTimeSpan = "1ms"; //if equals or less zero we try hours if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toHours(); aggregatedTimeSpan = timespan + "h"; } //if equals or less zero we try minutes if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toMinutes(); aggregatedTimeSpan = timespan + "m"; } //if equals or less zero we try millis if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toMillis(); aggregatedTimeSpan = timespan + "ms"; } */ String defaultAggregatedMetric = ""; switch (function) { case COUNT: defaultAggregatedMetric = "count";// + aggregatedTimeSpan + "-count"; //only for downsampling break; case MEAN: defaultAggregatedMetric = "avg";// + aggregatedTimeSpan + "-avg"; break; case SUM: defaultAggregatedMetric = "sum";// + aggregatedTimeSpan + "-sum"; break; case MIN: defaultAggregatedMetric = "min";// + aggregatedTimeSpan + "-min"; break; case MAX: defaultAggregatedMetric = "max";// + aggregatedTimeSpan + "-max"; break; case STDDEV: defaultAggregatedMetric = "dev";// + aggregatedTimeSpan + "-dev"; break; case PERCENTILE: Float p = benchmarkQuery.getPercentile(); if (p != null) { if(p <= 0.5){ defaultAggregatedMetric = "p50";// + aggregatedTimeSpan + "-p50"; } else if(p > 0.5 && p <= 0.75){ defaultAggregatedMetric = "p75";// + aggregatedTimeSpan + "-p75"; } else if(p > 0.75 && p <= 0.9){ defaultAggregatedMetric = "p90";// + aggregatedTimeSpan + "-p90"; } else if(p > 0.9 && p <= 0.95){ defaultAggregatedMetric = "p95";// + aggregatedTimeSpan + "-p95"; } else { defaultAggregatedMetric = "p99";// + aggregatedTimeSpan + "-p99"; } } break; case QUERY_ONLY: defaultAggregatedMetric = "sum";// + aggregatedTimeSpan; } defaultAggregatedMetric = defaultAggregatedMetric + ":" + escapedMetricName + "{tags}"; return new OpenTsdbQuery(startDate, endDate, defaultAggregatedMetric, tagString.toString()); } @Override public List<String> performQuery(BenchmarkQuery benchmarkQuery, Object queryObject) { List<String> queryResults = new LinkedList<>(); try{ OpenTsdbQuery query = ((OpenTsdbQuery) queryObject); String result = 
openTsdb.query(query.getStartDate(),query.getEndDate(),query.getAggregatedMetric(),query.getTagString()); queryResults.add(result); // TODO erase, only for debug queryResults.add("OpenTsdb aggregatedMetric: " + query.getAggregatedMetric()); queryResults.add("OpenTsdb tagString: " + query.getTagString()); queryResults.add("OpenTsdb startDate: " + query.getStartDate()); queryResults.add("OpenTsdb endData: " + query.getEndDate()); queryResults.add("OpenTsdb number of data points: " + getDataPointCount(result)); } catch (Exception e){ queryResults.add("OpenTSDB error performing query: " + e.getLocalizedMessage()); } return queryResults; } public String openTSDBEscapeValue(String value) { String escapedString = escape(value, ".").replaceAll("\\.\\.", ".").trim(); escapedString = escapedString.replaceAll("%", "Percent").trim(); escapedString = escapedString.replaceAll(":", "").trim(); escapedString = escapedString.replaceAll("\"", "").trim(); //Remove point if it is the first character if (escapedString.indexOf(".") == 0) { escapedString = escapedString.substring(1); } if (escapedString.lastIndexOf(".") == escapedString.length() - 1) { escapedString = escapedString.substring(0, escapedString.length() - 1); } escapedString = escapedString.replaceAll("\\.+", "."); return escapedString; } public String escape(String metric, String replacement) { return metric.replaceAll("(\\s|\\.|:|=|,|/|\\\\|\\*|\\(|\\)|_|#)", replacement); } private String opentTSDBDate(Instant date) { // "2014/12/27-12:48:20"; LocalDateTime localDateTime = LocalDateTime.ofInstant(date, ZoneId.systemDefault()); StringBuilder sb = new StringBuilder(); sb.append(localDateTime.getYear()) .append("/") .append(addDateSplit(localDateTime.getMonthValue())) .append("/") .append(addDateSplit(localDateTime.getDayOfMonth())) .append("-") .append(addDateSplit(localDateTime.getHour())) .append(":") .append(addDateSplit(localDateTime.getMinute())) .append(":") .append(addDateSplit(localDateTime.getSecond())); return sb.toString(); } private String openTsdbTimeString(Long epochMillis){ // first 10 or less digits are treated as seconds, milliseconds only with 3 digits precision. String result = epochMillis.toString(); if(result.length() >= 10){ String seconds = result.substring(0, 10); String millis = result.substring(10); if(millis.length() > 0 && millis.length() <= 3){ result = seconds + "." + millis; } else if (millis.length() > 3){ result = seconds + "." + millis.substring(0,3); } } return result; } private String addDateSplit(int value) { if (value < 10) { return "0" + value; } else { return "" + value; } } private int getDataPointCount(String openTsdbResultString){ String result = openTsdbResultString.substring(openTsdbResultString.indexOf("dps")); String[] splits = result.split(","); return splits.length; } }
TSDB_OpenTSDB_Interface/src/main/java/OpenTsdbInterface.java
import de.qaware.chronix.database.*; import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.time.temporal.TemporalUnit; import java.util.*; import java.util.concurrent.TimeUnit; /** * Created by mcqueen666 on 06.09.16. */ public class OpenTsdbInterface implements BenchmarkDataSource { private final String OPENTSDB_STORAGE_DIRECTORY = "/opt/data"; private final int OPENTSDB_NUMBER_OF_POINTS_PER_BATCH = 10; private String ipAddress; private int portNumber; private boolean isSetup = false; private String dbName = "chronixBenchmark"; OpenTsdb openTsdb; @Override public boolean setup(String ipAddress, int portNumber) { if(!isSetup){ try { openTsdb = new OpenTsdb.Builder("http://" + ipAddress + ":" + portNumber).create(); //TODO wait until openTsdb is up and ready // openTsdb.setBatchSizeLimit(NUMBER_OF_POINTS_PER_BATCH); // openTsdb cannot handle many points at once (o.0) openTsdb.setBatchSizeLimit(OPENTSDB_NUMBER_OF_POINTS_PER_BATCH); this.ipAddress = ipAddress; this.portNumber = portNumber; } catch (Exception e){ isSetup = false; System.err.println("Error OpenTSDB setup: " + e.getLocalizedMessage()); } } return isSetup; } @Override public boolean clean() { return false; } @Override public void shutdown() { } @Override public String getStorageDirectoryPath() { return OPENTSDB_STORAGE_DIRECTORY; } @Override public String importDataPoints(TimeSeries timeSeries) { String reply = "Error importing data points to openTsdb."; if(timeSeries != null) { Map<String, String> tags = timeSeries.getTagKey_tagValue(); openTsdb.preAssignDimensions(tags.keySet()); // create escapted metricName String metricName = openTSDBEscapeValue(timeSeries.getMetricName()); // create escapted tags Map<String, String> metaData = new HashMap<>(); for(Map.Entry<String, String> tag : tags.entrySet()){ metaData.put(tag.getKey(), openTSDBEscapeValue(tag.getValue())); } Set<OpenTsdbMetric> openTsdbMetricSet = new HashSet<>(); for(TimeSeriesPoint point : timeSeries.getPoints()){ OpenTsdbMetric openTsdbMetric = OpenTsdbMetric.named(metricName) .withTags(metaData) .withTimestamp(point.getTimeStamp()) // TODO maybe wrong time unit .withValue(point.getValue()) .build(); openTsdbMetricSet.add(openTsdbMetric); } try { //send uses given batch point size if(openTsdb.send(openTsdbMetricSet)){ reply = "Import of " + openTsdbMetricSet.size() + " points successful. 
Metric name: " + metricName; } } catch (Exception e){ reply = "Error importing data points to openTsdb: " + e.getLocalizedMessage(); } } return reply; } @Override public Object getQueryObject(BenchmarkQuery benchmarkQuery) { TimeSeriesMetaData timeSeriesMetaData = benchmarkQuery.getTimeSeriesMetaData(); QueryFunction function = benchmarkQuery.getFunction(); Map<String, String> tags = timeSeriesMetaData.getTagKey_tagValue(); String escapedMetricName = openTSDBEscapeValue(timeSeriesMetaData.getMetricName()); StringBuilder tagString = new StringBuilder(); tagString.append("{"); for(Map.Entry<String, String> tag : tags.entrySet()){ tagString.append(tag.getKey() + "=").append(openTSDBEscapeValue(tag.getValue())).append(","); } tagString.deleteCharAt(tagString.length()-1); tagString.append("}"); //String startDate = opentTSDBDate(Instant.ofEpochMilli(timeSeriesMetaData.getStart()).minusSeconds(1)); // openTsdb l //String endDate = opentTSDBDate(Instant.ofEpochMilli(timeSeriesMetaData.getEnd()).plusSeconds(1)); String startDate = openTsdbTimeString(timeSeriesMetaData.getStart()); String endDate = openTsdbTimeString(timeSeriesMetaData.getEnd()); /* long timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toDays(); String aggregatedTimeSpan = timespan + "d"; //aggregatedTimeSpan = "1ms"; //if equals or less zero we try hours if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toHours(); aggregatedTimeSpan = timespan + "h"; } //if equals or less zero we try minutes if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toMinutes(); aggregatedTimeSpan = timespan + "m"; } //if equals or less zero we try millis if(timespan <= 0){ timespan = Duration.between(Instant.ofEpochMilli(timeSeriesMetaData.getStart()), Instant.ofEpochMilli(timeSeriesMetaData.getEnd())).toMillis(); aggregatedTimeSpan = timespan + "ms"; } */ String defaultAggregatedMetric = ""; switch (function) { case COUNT: defaultAggregatedMetric = "count";// + aggregatedTimeSpan + "-count"; break; case MEAN: defaultAggregatedMetric = "avg";// + aggregatedTimeSpan + "-avg"; break; case SUM: defaultAggregatedMetric = "sum";// + aggregatedTimeSpan + "-sum"; break; case MIN: defaultAggregatedMetric = "min";// + aggregatedTimeSpan + "-min"; break; case MAX: defaultAggregatedMetric = "max";// + aggregatedTimeSpan + "-max"; break; case STDDEV: defaultAggregatedMetric = "dev";// + aggregatedTimeSpan + "-dev"; break; case PERCENTILE: Float p = benchmarkQuery.getPercentile(); if (p != null) { if(p <= 0.5){ defaultAggregatedMetric = "p50";// + aggregatedTimeSpan + "-p50"; } else if(p > 0.5 && p <= 0.75){ defaultAggregatedMetric = "p75";// + aggregatedTimeSpan + "-p75"; } else if(p > 0.75 && p <= 0.9){ defaultAggregatedMetric = "p90";// + aggregatedTimeSpan + "-p90"; } else if(p > 0.9 && p <= 0.95){ defaultAggregatedMetric = "p95";// + aggregatedTimeSpan + "-p95"; } else { defaultAggregatedMetric = "p99";// + aggregatedTimeSpan + "-p99"; } } break; case QUERY_ONLY: defaultAggregatedMetric = "sum";// + aggregatedTimeSpan; } defaultAggregatedMetric = defaultAggregatedMetric + ":" + escapedMetricName + "{tags}"; return new OpenTsdbQuery(startDate, endDate, defaultAggregatedMetric, tagString.toString()); } @Override public List<String> performQuery(BenchmarkQuery benchmarkQuery, Object 
queryObject) { List<String> queryResults = new LinkedList<>(); try{ OpenTsdbQuery query = ((OpenTsdbQuery) queryObject); String result = openTsdb.query(query.getStartDate(),query.getEndDate(),query.getAggregatedMetric(),query.getTagString()); queryResults.add(result); // TODO erase, only for debug queryResults.add("OpenTsdb aggregatedMetric: " + query.getAggregatedMetric()); queryResults.add("OpenTsdb tagString: " + query.getTagString()); queryResults.add("OpenTsdb startDate: " + query.getStartDate()); queryResults.add("OpenTsdb endData: " + query.getEndDate()); queryResults.add("OpenTsdb number of data points: " + getDataPointCount(result)); } catch (Exception e){ queryResults.add("OpenTSDB error performing query: " + e.getLocalizedMessage()); } return queryResults; } public String openTSDBEscapeValue(String value) { String escapedString = escape(value, ".").replaceAll("\\.\\.", ".").trim(); escapedString = escapedString.replaceAll("%", "Percent").trim(); escapedString = escapedString.replaceAll(":", "").trim(); escapedString = escapedString.replaceAll("\"", "").trim(); //Remove point if it is the first character if (escapedString.indexOf(".") == 0) { escapedString = escapedString.substring(1); } if (escapedString.lastIndexOf(".") == escapedString.length() - 1) { escapedString = escapedString.substring(0, escapedString.length() - 1); } escapedString = escapedString.replaceAll("\\.+", "."); return escapedString; } public String escape(String metric, String replacement) { return metric.replaceAll("(\\s|\\.|:|=|,|/|\\\\|\\*|\\(|\\)|_|#)", replacement); } private String opentTSDBDate(Instant date) { // "2014/12/27-12:48:20"; LocalDateTime localDateTime = LocalDateTime.ofInstant(date, ZoneId.systemDefault()); StringBuilder sb = new StringBuilder(); sb.append(localDateTime.getYear()) .append("/") .append(addDateSplit(localDateTime.getMonthValue())) .append("/") .append(addDateSplit(localDateTime.getDayOfMonth())) .append("-") .append(addDateSplit(localDateTime.getHour())) .append(":") .append(addDateSplit(localDateTime.getMinute())) .append(":") .append(addDateSplit(localDateTime.getSecond())); return sb.toString(); } private String openTsdbTimeString(Long epochMillis){ // first 10 or less digits are treaded as seconds, milliseconds only with 3 digits precision. String result = epochMillis.toString(); if(result.length() >= 10){ String seconds = result.substring(0, 10); String millis = result.substring(10); if(millis.length() > 0 && millis.length() <= 3){ result = seconds + "." + millis; } else if (millis.length() > 3){ result = seconds + "." + millis.substring(0,3); } } return result; } private String addDateSplit(int value) { if (value < 10) { return "0" + value; } else { return "" + value; } } private int getDataPointCount(String openTsdbResultString){ String result = openTsdbResultString.substring(openTsdbResultString.indexOf("dps")); String[] splits = result.split(","); return splits.length; } }
openTsdb: code comments added
TSDB_OpenTSDB_Interface/src/main/java/OpenTsdbInterface.java
openTsdb: code comments added
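The openTsdbTimeString helper in the interface above turns epoch milliseconds into OpenTSDB's seconds-dot-millis notation: the first ten digits are read as seconds and at most three further digits as the millisecond fraction. The following standalone sketch mirrors that conversion; the class and method names here are illustrative and not part of the original commit.

public class EpochToOpenTsdbTime {
    // Mirrors the conversion above: the first 10 digits become seconds,
    // up to 3 further digits become the fractional millisecond part.
    static String toOpenTsdbTime(long epochMillis) {
        String s = Long.toString(epochMillis);
        if (s.length() <= 10) {
            return s;                              // short values are already plain seconds
        }
        String seconds = s.substring(0, 10);
        String millis = s.substring(10);
        if (millis.length() > 3) {
            millis = millis.substring(0, 3);       // OpenTSDB keeps millisecond precision only
        }
        return seconds + "." + millis;
    }

    public static void main(String[] args) {
        System.out.println(toOpenTsdbTime(1473155680123L));   // prints 1473155680.123
    }
}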
Java
apache-2.0
72e538a3490d7a6283f9a4ead6fcad2ea612c2bd
0
rhatlapa/undertow,rhusar/undertow,Karm/undertow,jamezp/undertow,rogerchina/undertow,jasonchaffee/undertow,baranowb/undertow,amannm/undertow,undertow-io/undertow,grassjedi/undertow,jstourac/undertow,baranowb/undertow,pedroigor/undertow,jasonchaffee/undertow,undertow-io/undertow,nkhuyu/undertow,jasonchaffee/undertow,undertow-io/undertow,golovnin/undertow,marschall/undertow,pedroigor/undertow,Karm/undertow,jamezp/undertow,ctomc/undertow,soul2zimate/undertow,emag/codereading-undertow,marschall/undertow,biddyweb/undertow,ctomc/undertow,aradchykov/undertow,rhusar/undertow,golovnin/undertow,darranl/undertow,popstr/undertow,emag/codereading-undertow,grassjedi/undertow,biddyweb/undertow,rhatlapa/undertow,n1hility/undertow,yonglehou/undertow,marschall/undertow,aldaris/undertow,msfm/undertow,msfm/undertow,stuartwdouglas/undertow,popstr/undertow,wildfly-security-incubator/undertow,popstr/undertow,jstourac/undertow,TomasHofman/undertow,stuartwdouglas/undertow,pedroigor/undertow,darranl/undertow,n1hility/undertow,ctomc/undertow,yonglehou/undertow,nkhuyu/undertow,aldaris/undertow,n1hility/undertow,TomasHofman/undertow,jstourac/undertow,pferraro/undertow,jamezp/undertow,rhusar/undertow,soul2zimate/undertow,golovnin/undertow,aradchykov/undertow,soul2zimate/undertow,biddyweb/undertow,amannm/undertow,grassjedi/undertow,rogerchina/undertow,yonglehou/undertow,darranl/undertow,msfm/undertow,amannm/undertow,stuartwdouglas/undertow,pferraro/undertow,aradchykov/undertow,baranowb/undertow,wildfly-security-incubator/undertow,rogerchina/undertow,rhatlapa/undertow,nkhuyu/undertow,wildfly-security-incubator/undertow,aldaris/undertow,TomasHofman/undertow,pferraro/undertow,Karm/undertow
package io.undertow.util; import java.util.ArrayDeque; import java.util.Deque; import java.util.LinkedHashMap; import java.util.Map; /** * Methods for dealing with the query string * * @author Stuart Douglas */ public class QueryParameterUtils { private QueryParameterUtils() { } public static String buildQueryString(final Map<String, Deque<String>> params) { StringBuilder sb = new StringBuilder(); boolean first = true; for (Map.Entry<String, Deque<String>> entry : params.entrySet()) { if (entry.getValue().isEmpty()) { if (first) { first = false; } else { sb.append('&'); } sb.append(entry.getKey()); sb.append('='); } else { for (String val : entry.getValue()) { if (first) { first = false; } else { sb.append('&'); } sb.append(entry.getKey()); sb.append('='); sb.append(val); } } } return sb.toString(); } /** * Parses a query string into a map * @param newQueryString The query string * @return The map of key value parameters */ public static Map<String, Deque<String>> parseQueryString(final String newQueryString) { Map<String, Deque<String>> newQueryParameters = new LinkedHashMap<String, Deque<String>>(); int startPos = 0; int equalPos = -1; for(int i = 0; i < newQueryString.length(); ++i) { char c = newQueryString.charAt(i); if(c == '=' && equalPos == -1) { equalPos = i; } else if(c == '&') { handleQueryParameter(newQueryString, newQueryParameters, startPos, equalPos, i); startPos = i + 1; equalPos = -1; } } if(startPos != newQueryString.length()) { handleQueryParameter(newQueryString, newQueryParameters, startPos, equalPos, newQueryString.length()); } return newQueryParameters; } private static void handleQueryParameter(String newQueryString, Map<String, Deque<String>> newQueryParameters, int startPos, int equalPos, int i) { String key; String value = ""; if(equalPos == -1) { key = newQueryString.substring(startPos, i); } else { key = newQueryString.substring(startPos, equalPos); value = newQueryString.substring(equalPos + 1, i); } Deque<String> queue = newQueryParameters.get(key); if (queue == null) { newQueryParameters.put(key, queue = new ArrayDeque<String>(1)); } if(value != null) { queue.add(value); } } public static Map<String, Deque<String>> mergeQueryParametersWithNewQueryString(final Map<String, Deque<String>> queryParameters, final String newQueryString) { Map<String, Deque<String>> newQueryParameters = parseQueryString(newQueryString); for (Map.Entry<String, Deque<String>> entry : queryParameters.entrySet()) { if (!newQueryParameters.containsKey(entry.getKey())) { newQueryParameters.put(entry.getKey(), new ArrayDeque<String>(entry.getValue())); } } return newQueryParameters; } }
core/src/main/java/io/undertow/util/QueryParameterUtils.java
package io.undertow.util; import java.util.ArrayDeque; import java.util.Deque; import java.util.LinkedHashMap; import java.util.Map; /** * Methods for dealing with the query string * * @author Stuart Douglas */ public class QueryParameterUtils { private QueryParameterUtils() { } public static String buildQueryString(final Map<String, Deque<String>> params) { StringBuilder sb = new StringBuilder(); boolean first = true; for (Map.Entry<String, Deque<String>> entry : params.entrySet()) { if (entry.getValue().isEmpty()) { if (first) { first = false; } else { sb.append('&'); } sb.append(entry.getKey()); sb.append('='); } else { for (String val : entry.getValue()) { if (first) { first = false; } else { sb.append('&'); } sb.append(entry.getKey()); sb.append('='); sb.append(val); } } } return sb.toString(); } /** * Parses a query string into a map * @param newQueryString The query string * @return The map of key value parameters */ public static Map<String, Deque<String>> parseQueryString(final String newQueryString) { Map<String, Deque<String>> newQueryParameters = new LinkedHashMap<String, Deque<String>>(); int startPos = 0; int equalPos = -1; for(int i = 0; i < newQueryString.length(); ++i) { char c = newQueryString.charAt(i); if(c == '=' && equalPos == -1) { equalPos = i; } else if(c == '&') { handleQueryParameter(newQueryString, newQueryParameters, startPos, equalPos, i); startPos = i + 1; equalPos = -1; } } if(startPos != newQueryString.length()) { handleQueryParameter(newQueryString, newQueryParameters, startPos, equalPos, newQueryString.length()); } return newQueryParameters; } private static void handleQueryParameter(String newQueryString, Map<String, Deque<String>> newQueryParameters, int startPos, int equalPos, int i) { String key; String value = null; if(equalPos == -1) { key = newQueryString.substring(startPos, i); } else { key = newQueryString.substring(startPos, equalPos); value = newQueryString.substring(equalPos + 1, i); } Deque<String> queue = newQueryParameters.get(key); if (queue == null) { newQueryParameters.put(key, queue = new ArrayDeque<String>(1)); } queue.add(value); } public static Map<String, Deque<String>> mergeQueryParametersWithNewQueryString(final Map<String, Deque<String>> queryParameters, final String newQueryString) { Map<String, Deque<String>> newQueryParameters = parseQueryString(newQueryString); for (Map.Entry<String, Deque<String>> entry : queryParameters.entrySet()) { if (!newQueryParameters.containsKey(entry.getKey())) { newQueryParameters.put(entry.getKey(), new ArrayDeque<String>(entry.getValue())); } } return newQueryParameters; } }
Fix NPE in query param aggregation
core/src/main/java/io/undertow/util/QueryParameterUtils.java
Fix NPE in query param aggregation
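The "Fix NPE in query param aggregation" change above swaps the null default for a valueless parameter (e.g. a bare ?flag) to an empty string and guards the deque insertion, because ArrayDeque refuses null elements. Below is a minimal standalone demonstration of the failure mode the commit removes; the class name is illustrative and unrelated to the Undertow sources.

import java.util.ArrayDeque;
import java.util.Deque;

public class ValuelessParamDemo {
    public static void main(String[] args) {
        Deque<String> values = new ArrayDeque<String>(1);
        values.add("");                            // fixed code path: an empty string is accepted
        try {
            values.add(null);                      // old code path for a key without '=': throws
        } catch (NullPointerException expected) {
            System.out.println("ArrayDeque rejects null values: " + expected);
        }
    }
}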
Java
apache-2.0
9226acc501d811e6529b958ebb00ed2e15cce5a1
0
Eduworks/ew,Eduworks/ew,Eduworks/ew
package com.eduworks.util.io; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.net.URLConnection; import java.net.URLDecoder; import java.util.Collection; import java.util.Set; import java.util.regex.Pattern; import java.util.zip.DeflaterInputStream; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; import org.reflections.Reflections; import org.reflections.scanners.ResourcesScanner; import org.reflections.util.ClasspathHelper; import org.reflections.util.ConfigurationBuilder; import com.eduworks.lang.EwSet; import com.google.common.base.Predicate; import java.util.Enumeration; public class EwFileSystem { private static String OS = System.getProperty("os.name").toLowerCase(); public static boolean isWindows() { return (OS.indexOf("win") >= 0); } public static boolean isMac() { return (OS.indexOf("mac") >= 0); } public static boolean isUnix() { return (OS.indexOf("nix") >= 0 || OS.indexOf("nux") >= 0 || OS.indexOf("aix") > 0); } public static boolean isSolaris() { return (OS.indexOf("sunos") >= 0); } public static String webConfigurationPath = null; public static String getWebConfigurationPath() { try { getDefaultLocationPath(true); } catch (IOException e) { e.printStackTrace(); } if (webConfigurationPath == null) { if (System.getProperty("eduworks.webapp.config") != null) { webConfigurationPath = new File(System.getProperty("eduworks.webapp.config")).getParent(); } } return webConfigurationPath; } public static File tryFindFile(String estimatedPath, Class<? extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) { try { return findFile(estimatedPath, inThisClassJar, permanantFile, isWebResource); } catch (IOException e) { return null; } } public static File copyPackage(final String pkg, final Class<? extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) { urls.add(url); } Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFile(input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) { if (s.startsWith(pkg)) { findFile(s, inThisClassJar, true, false); } } return new File(getDefaultLocationPath(false), pkg); } public static File copyPackageToRoot(final String pkg, final Class<? 
extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) { urls.add(url); } Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFileIn(input, input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) { if (s.startsWith(pkg)) { findFileIn(s, s, inThisClassJar, true, false); } } return new File(pkg); } public static File copyPackageChildrenToRoot(final String pkg, final Class<? extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) { urls.add(url); } Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFileIn(input.replace(pkg + "/", ""), input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) { if (s.startsWith(pkg)) { findFileIn(s.replace(pkg + "/", ""), s, inThisClassJar, true, false); } } return new File("."); } /* * Estimated Path should not include '/'. */ public static File findFileIn(String desiredPath, String estimatedPath, Class<? 
extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) throws IOException { File targetPath = new File(desiredPath); if (targetPath.exists()) { return targetPath; } URL possibleFile = null; InputStream possibleInputStream = null; if (inThisClassJar != null) { possibleFile = inThisClassJar.getResource(estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream(estimatedPath); if (possibleFile == null) { possibleFile = inThisClassJar.getResource("/" + estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream("/" + estimatedPath); } } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource(estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream(estimatedPath); } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource("/" + estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream("/" + estimatedPath); } if (possibleFile == null) { File file = new File(desiredPath); if (file.exists()) { possibleFile = file.toURI().toURL(); possibleInputStream = new FileInputStream(file); } } if (possibleFile == null) { throw new IOException("Could not find file: " + estimatedPath); } if (possibleInputStream != null) { if (targetPath.exists() == false) { if (targetPath.getParentFile() != null) { targetPath.getParentFile().mkdirs(); } targetPath.createNewFile(); } FileOutputStream targetOutputStream = new FileOutputStream(targetPath); IOUtils.copy(possibleInputStream, targetOutputStream); IOUtils.closeQuietly(targetOutputStream); IOUtils.closeQuietly(possibleInputStream); } targetPath.mkdirs(); if (!permanantFile) { targetPath.deleteOnExit(); } return targetPath; } public static File findFile(String estimatedPath, Class<? extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) throws IOException { String rootPath = null; if (isWebResource) { rootPath = getWebConfigurationPath(); } if (rootPath == null) { rootPath = getDefaultLocationPath(isWebResource); } File targetPath = new File(rootPath, estimatedPath); if (targetPath.exists()) { return targetPath; } URL possibleFile = null; InputStream possibleInputStream = null; if (inThisClassJar != null) { String jar = inThisClassJar.getProtectionDomain().getCodeSource().getLocation().toString().split("!")[0]; Enumeration<URL> resources = inThisClassJar.getClassLoader().getResources(estimatedPath); while (resources.hasMoreElements()) { URL url = resources.nextElement(); if (url.toString().contains(jar)) { possibleFile = url; possibleInputStream = url.openStream(); } } if (possibleFile == null) { resources = inThisClassJar.getClassLoader().getResources("/" + estimatedPath); while (resources.hasMoreElements()) { URL url = resources.nextElement(); if (url.toString().contains(jar)) { possibleFile = url; possibleInputStream = url.openStream(); } } } } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource(estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream(estimatedPath); } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource("/" + estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream("/" + estimatedPath); } if (possibleFile == null) { File file = new File(estimatedPath); if (file.exists()) { possibleFile = file.toURI().toURL(); possibleInputStream = new FileInputStream(file); } } if (possibleFile == null && possibleInputStream == null) { throw new IOException("Could not find file: " + estimatedPath); } System.out.println(possibleFile); if 
(possibleInputStream != null) { if (targetPath.exists() == false) { targetPath.getParentFile().mkdirs(); targetPath.createNewFile(); } FileOutputStream targetOutputStream = new FileOutputStream(targetPath); IOUtils.copy(possibleInputStream, targetOutputStream); IOUtils.closeQuietly(targetOutputStream); IOUtils.closeQuietly(possibleInputStream); } targetPath.mkdirs(); // TODO: If this thing is a directory, and it is in a jar, then we want // to copy all the files in the directory in the jar into the target // location. if (!permanantFile) { targetPath.deleteOnExit(); } return targetPath; } /** * Close a stream or reader/writer object. * * @param stream Stream to close. */ public static void closeIt(Object stream) { try { if (stream instanceof InputStream) { ((InputStream) stream).close(); } else if (stream instanceof OutputStream) { ((OutputStream) stream).close(); } else if (stream instanceof Reader) { ((Reader) stream).close(); } else if (stream instanceof Writer) { ((Writer) stream).close(); } } catch (IOException e) { // TODO: log this instead? e.printStackTrace(); } } /** * Download the content of a URL to a specific local file or a temporary * file if no local file is specified. Returns the local file used. * * @param path URL to download. * @param localFile To this local file. * @param timeout Timeout for download. * @return Downloaded file or null. * @throws IOException Error in downloading or writing file. */ public static File downloadFile(String path, File localFile, int timeout) throws IOException { URL uri; FileOutputStream fileOutputStream = null; InputStream inputStream = null; try { try { uri = new URL(path); } catch (MalformedURLException e) { uri = new URL(URLDecoder.decode(path)); } URLConnection connection = uri.openConnection(); connection.setConnectTimeout(timeout); connection.setReadTimeout(timeout); connection.setRequestProperty("Accept", "*/*"); connection.connect(); inputStream = connection.getInputStream(); // If the local file is null, create a temporary file to hold the // content if (localFile == null) { String ext = null; try { String headerField = connection.getHeaderField("content-disposition"); if (headerField != null && headerField.split(";")[0].equals("attachment")) { String filename = headerField.split(";")[1].split("=")[1]; if (filename.contains(".")) { ext = filename.split("\\.")[filename.split("\\.").length - 1]; } } } catch (Exception ex) { System.out.println("Download of File: Could not determine extension appropriately from header."); System.out.println(uri); ex.printStackTrace(); } if (ext == null) { ext = uri.getPath().substring(uri.getPath().lastIndexOf("/") + 1); } if (ext == null || ext.isEmpty()) { if (connection.getContentType() != null) { if (!connection.getContentType().endsWith("/")) { ext = connection.getContentType().substring(connection.getContentType().lastIndexOf('/') + 1); } } } if (ext.contains(".")) { ext = ext.substring(ext.indexOf(".")); } localFile = File.createTempFile("foo", "." 
+ removeNonazAZStatic(ext)); } fileOutputStream = new FileOutputStream(localFile); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("gzip")) { inputStream = new GZIPInputStream(inputStream); } if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("deflate")) { inputStream = new DeflaterInputStream(inputStream); } IOUtils.copy(inputStream, fileOutputStream); return localFile; } finally { closeIt(inputStream); closeIt(fileOutputStream); } } private static String removeNonazAZStatic(String _text) { for (int i = 0; i < _text.length(); i++) { char k = _text.charAt(i); if (!(k >= 'a' && k <= 'z') && !(k >= 'A' && k <= 'Z')) { StringBuilder sb = new StringBuilder(); for (int j = 0; j < _text.length(); j++) { char c = _text.charAt(j); if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) { sb.append(_text.charAt(j)); } } return sb.toString(); } } return _text; } public static String resolve(URL uri) throws IOException { ByteArrayOutputStream baos = null; InputStream inputStream = null; try { URLConnection connection = uri.openConnection(); inputStream = connection.getInputStream(); connection.connect(); baos = new ByteArrayOutputStream(); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("gzip")) { inputStream = new GZIPInputStream(inputStream); } if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("deflate")) { inputStream = new DeflaterInputStream(inputStream); } IOUtils.copy(inputStream, baos); return new String(baos.toByteArray()); } finally { closeIt(inputStream); closeIt(baos); } } public static File downloadFile(String path) throws IOException { return downloadFile(path, null, 5 * 60 * 1000); } public static File downloadFile(String path, int timeoutms) throws IOException { return downloadFile(path, null, timeoutms); } private static String getDefaultLocationPath(boolean isWebResource) throws IOException { if (webConfigurationPath != null) { return webConfigurationPath; } String rootPath; rootPath = createTempDirectory().getName(); if (isWebResource) { webConfigurationPath = rootPath; } return rootPath; } private static File createTempDirectory() throws IOException { File createTempFile = File.createTempFile("ewww", "tmp"); createTempFile.delete(); createTempFile = new File(createTempFile.getParentFile(), "etc"); createTempFile.mkdirs(); createTempFile.mkdir(); createTempFile.deleteOnExit(); return createTempFile; } public static void deleteEventually(File file) { if (file == null) { return; } if (!file.exists()) { return; } if (!file.delete()) { file.deleteOnExit(); } } public static void placeInWorkingDirectoryTemporarily(File findFile, String relativePath) { File dest = new File(relativePath); if (dest.exists()) { return; } dest.getParentFile().mkdirs(); FileInputStream input; try { input = new FileInputStream(findFile); dest.createNewFile(); FileOutputStream output = new FileOutputStream(dest); IOUtils.copy(input, output); IOUtils.closeQuietly(input); IOUtils.closeQuietly(output); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } dest.deleteOnExit(); } public static void copyPackage(String path, String string, Class<? extends Object> class1) { } }
ew.common/src/main/java/com/eduworks/util/io/EwFileSystem.java
package com.eduworks.util.io; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.net.URLConnection; import java.net.URLDecoder; import java.util.Collection; import java.util.Set; import java.util.regex.Pattern; import java.util.zip.DeflaterInputStream; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; import org.reflections.Reflections; import org.reflections.scanners.ResourcesScanner; import org.reflections.util.ClasspathHelper; import org.reflections.util.ConfigurationBuilder; import com.eduworks.lang.EwSet; import com.google.common.base.Predicate; public class EwFileSystem { private static String OS = System.getProperty("os.name").toLowerCase(); public static boolean isWindows() { return (OS.indexOf("win") >= 0); } public static boolean isMac() { return (OS.indexOf("mac") >= 0); } public static boolean isUnix() { return (OS.indexOf("nix") >= 0 || OS.indexOf("nux") >= 0 || OS.indexOf("aix") > 0); } public static boolean isSolaris() { return (OS.indexOf("sunos") >= 0); } public static String webConfigurationPath = null; public static String getWebConfigurationPath() { try { getDefaultLocationPath(true); } catch (IOException e) { e.printStackTrace(); } if (webConfigurationPath == null) if (System.getProperty("eduworks.webapp.config") != null) webConfigurationPath = new File(System.getProperty("eduworks.webapp.config")).getParent(); return webConfigurationPath; } public static File tryFindFile(String estimatedPath, Class<? extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) { try { return findFile(estimatedPath, inThisClassJar, permanantFile, isWebResource); } catch (IOException e) { return null; } } public static File copyPackage(final String pkg, final Class<? extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) urls.add(url); Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFile(input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) if (s.startsWith(pkg)) findFile(s, inThisClassJar, true, false); return new File(getDefaultLocationPath(false), pkg); } public static File copyPackageToRoot(final String pkg, final Class<? 
extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) urls.add(url); Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFileIn(input, input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) if (s.startsWith(pkg)) findFileIn(s, s, inThisClassJar, true, false); return new File(pkg); } public static File copyPackageChildrenToRoot(final String pkg, final Class<? extends Object> inThisClassJar) throws IOException { Collection<URL> urlsForCurrentClasspath = ClasspathHelper.forManifest();// forPackage(pkg, // ClassLoader.getSystemClassLoader()); EwSet<URL> urls = new EwSet<URL>(); for (URL url : urlsForCurrentClasspath) urls.add(url); Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(urls).setScanners( new ResourcesScanner().filterResultsBy(new Predicate<String>() { @Override public boolean apply(String input) { try { findFileIn(input.replace(pkg + "/", ""), input, inThisClassJar, true, false); } catch (IOException e) { e.printStackTrace(); } return true; } }))); Set<String> resources = reflections.getResources(Pattern.compile(".*")); for (String s : resources) if (s.startsWith(pkg)) findFileIn(s.replace(pkg + "/", ""), s, inThisClassJar, true, false); return new File("."); } /* * Estimated Path should not include '/'. */ public static File findFileIn(String desiredPath, String estimatedPath, Class<? 
extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) throws IOException { File targetPath = new File(desiredPath); if (targetPath.exists()) return targetPath; URL possibleFile = null; InputStream possibleInputStream = null; if (inThisClassJar != null) { possibleFile = inThisClassJar.getResource(estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream(estimatedPath); if (possibleFile == null) { possibleFile = inThisClassJar.getResource("/" + estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream("/" + estimatedPath); } } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource(estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream(estimatedPath); } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource("/" + estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream("/" + estimatedPath); } if (possibleFile == null) { File file = new File(desiredPath); if (file.exists()) { possibleFile = file.toURI().toURL(); possibleInputStream = new FileInputStream(file); } } if (possibleFile == null) throw new IOException("Could not find file: " + estimatedPath); if (possibleInputStream != null) { if (targetPath.exists() == false) { if (targetPath.getParentFile() != null) targetPath.getParentFile().mkdirs(); targetPath.createNewFile(); } FileOutputStream targetOutputStream = new FileOutputStream(targetPath); IOUtils.copy(possibleInputStream, targetOutputStream); IOUtils.closeQuietly(targetOutputStream); IOUtils.closeQuietly(possibleInputStream); } targetPath.mkdirs(); if (!permanantFile) targetPath.deleteOnExit(); return targetPath; } public static File findFile(String estimatedPath, Class<? extends Object> inThisClassJar, boolean permanantFile, boolean isWebResource) throws IOException { String rootPath = null; if (isWebResource) rootPath = getWebConfigurationPath(); if (rootPath == null) { rootPath = getDefaultLocationPath(isWebResource); } File targetPath = new File(rootPath, estimatedPath); if (targetPath.exists()) return targetPath; URL possibleFile = null; InputStream possibleInputStream = null; if (inThisClassJar != null) { possibleFile = inThisClassJar.getResource(estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream(estimatedPath); if (possibleFile == null) { possibleFile = inThisClassJar.getResource("/" + estimatedPath); possibleInputStream = inThisClassJar.getResourceAsStream("/" + estimatedPath); } } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource(estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream(estimatedPath); } if (possibleFile == null) { possibleFile = EwFileSystem.class.getResource("/" + estimatedPath); possibleInputStream = EwFileSystem.class.getResourceAsStream("/" + estimatedPath); } if (possibleFile == null) { File file = new File(estimatedPath); if (file.exists()) { possibleFile = file.toURI().toURL(); possibleInputStream = new FileInputStream(file); } } if (possibleFile == null) throw new IOException("Could not find file: " + estimatedPath); System.out.println(possibleFile); if (possibleInputStream != null) { if (targetPath.exists() == false) { targetPath.getParentFile().mkdirs(); targetPath.createNewFile(); } FileOutputStream targetOutputStream = new FileOutputStream(targetPath); IOUtils.copy(possibleInputStream, targetOutputStream); IOUtils.closeQuietly(targetOutputStream); IOUtils.closeQuietly(possibleInputStream); } targetPath.mkdirs(); // TODO: If this thing is a directory, 
and it is in a jar, then we want // to copy all the files in the directory in the jar into the target // location. if (!permanantFile) targetPath.deleteOnExit(); return targetPath; } /** * Close a stream or reader/writer object. * @param stream Stream to close. */ public static void closeIt(Object stream) { try { if (stream instanceof InputStream) ((InputStream) stream).close(); else if (stream instanceof OutputStream) ((OutputStream) stream).close(); else if (stream instanceof Reader) ((Reader) stream).close(); else if (stream instanceof Writer) ((Writer) stream).close(); } catch (IOException e) { // TODO: log this instead? e.printStackTrace(); } } /** * Download the content of a URL to a specific local file or a temporary * file if no local file is specified. Returns the local file used. * * @param path URL to download. * @param localFile To this local file. * @param timeout Timeout for download. * @return Downloaded file or null. * @throws IOException Error in downloading or writing file. */ public static File downloadFile(String path, File localFile, int timeout) throws IOException { URL uri; FileOutputStream fileOutputStream = null; InputStream inputStream = null; try { try { uri = new URL(path); } catch (MalformedURLException e) { uri = new URL(URLDecoder.decode(path)); } URLConnection connection = uri.openConnection(); connection.setConnectTimeout(timeout); connection.setReadTimeout(timeout); connection.setRequestProperty("Accept", "*/*"); connection.connect(); inputStream = connection.getInputStream(); // If the local file is null, create a temporary file to hold the // content if (localFile == null) { String ext = null; try { String headerField = connection.getHeaderField("content-disposition"); if (headerField != null && headerField.split(";")[0].equals("attachment")) { String filename = headerField.split(";")[1].split("=")[1]; if (filename.contains(".")) ext = filename.split("\\.")[filename.split("\\.").length-1]; } } catch (Exception ex) { System.out.println("Download of File: Could not determine extension appropriately from header."); System.out.println(uri); ex.printStackTrace(); } if (ext == null) ext = uri.getPath().substring(uri.getPath().lastIndexOf("/") + 1); if (ext == null || ext.isEmpty()) if (connection.getContentType() != null) if (!connection.getContentType().endsWith("/")) ext = connection.getContentType().substring(connection.getContentType().lastIndexOf('/') + 1); if (ext.contains(".")) ext = ext.substring(ext.indexOf(".")); localFile = File.createTempFile("foo", "." 
+ removeNonazAZStatic(ext)); } fileOutputStream = new FileOutputStream(localFile); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("gzip")) inputStream = new GZIPInputStream(inputStream); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("deflate")) inputStream = new DeflaterInputStream(inputStream); IOUtils.copy(inputStream, fileOutputStream); return localFile; } finally { closeIt(inputStream); closeIt(fileOutputStream); } } private static String removeNonazAZStatic(String _text) { for (int i = 0; i < _text.length(); i++) { char k = _text.charAt(i); if (!(k >= 'a' && k <= 'z') && !(k >= 'A' && k <= 'Z')) { StringBuilder sb = new StringBuilder(); for (int j = 0; j < _text.length(); j++) { char c = _text.charAt(j); if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')) sb.append(_text.charAt(j)); } return sb.toString(); } } return _text; } public static String resolve(URL uri) throws IOException { ByteArrayOutputStream baos = null; InputStream inputStream = null; try { URLConnection connection = uri.openConnection(); inputStream = connection.getInputStream(); connection.connect(); baos = new ByteArrayOutputStream(); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("gzip")) inputStream = new GZIPInputStream(inputStream); if (connection.getContentEncoding() != null && connection.getContentEncoding().equals("deflate")) inputStream = new DeflaterInputStream(inputStream); IOUtils.copy(inputStream, baos); return new String(baos.toByteArray()); } finally { closeIt(inputStream); closeIt(baos); } } public static File downloadFile(String path) throws IOException { return downloadFile(path, null, 5 * 60 * 1000); } public static File downloadFile(String path, int timeoutms) throws IOException { return downloadFile(path, null, timeoutms); } private static String getDefaultLocationPath(boolean isWebResource) throws IOException { if (webConfigurationPath != null) return webConfigurationPath; String rootPath; rootPath = createTempDirectory().getName(); if (isWebResource) webConfigurationPath = rootPath; return rootPath; } private static File createTempDirectory() throws IOException { File createTempFile = File.createTempFile("ewww", "tmp"); createTempFile.delete(); createTempFile = new File(createTempFile.getParentFile(), "etc"); createTempFile.mkdirs(); createTempFile.mkdir(); createTempFile.deleteOnExit(); return createTempFile; } public static void deleteEventually(File file) { if (file == null) return; if (!file.exists()) return; if (!file.delete()) file.deleteOnExit(); } public static void placeInWorkingDirectoryTemporarily(File findFile, String relativePath) { File dest = new File(relativePath); if (dest.exists()) return; dest.getParentFile().mkdirs(); FileInputStream input; try { input = new FileInputStream(findFile); dest.createNewFile(); FileOutputStream output = new FileOutputStream(dest); IOUtils.copy(input, output); IOUtils.closeQuietly(input); IOUtils.closeQuietly(output); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } dest.deleteOnExit(); } public static void copyPackage(String path, String string, Class<? extends Object> class1) { } }
EwFileSystem now filters jars better when finding files.
ew.common/src/main/java/com/eduworks/util/io/EwFileSystem.java
EwFileSystem now filters jars better when finding files.
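The "filters jars better" change above makes findFile prefer resources that live in the same jar as the requesting class: it derives the jar URL from the class's code source and keeps only matching entries from ClassLoader.getResources. A condensed sketch of that filtering step follows; the class and method names are illustrative, and the real method additionally retries with a leading slash and falls back to plain files, as shown in the record above.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Enumeration;

public class SameJarResourceLookup {
    // Returns a stream for 'path' only if the resource comes from the jar that contains 'owner'.
    static InputStream openFromOwningJar(Class<?> owner, String path) throws IOException {
        String jar = owner.getProtectionDomain().getCodeSource()
                          .getLocation().toString().split("!")[0];
        Enumeration<URL> candidates = owner.getClassLoader().getResources(path);
        while (candidates.hasMoreElements()) {
            URL url = candidates.nextElement();
            if (url.toString().contains(jar)) {    // keep only hits from the owning jar
                return url.openStream();
            }
        }
        return null;                               // caller falls back to other lookup strategies
    }
}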
Java
apache-2.0
b6567505573a2641c855735088b22dfe6eb395cd
0
icoloma/simpleds
package org.simpleds.cache; import com.google.common.collect.Maps; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Serializable; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; /** * The thread-bound Level 1 cache container. * This cache will store Key-entity pair values or String-Query data (be it a query count or a list of returning Key values). * @author Nacho * */ public class Level1Cache { /** the thread-bound instance */ private static ThreadLocal<Level1Cache> threadLocal = new ThreadLocal<Level1Cache>(); /** the cache contents (the key can be a Key or a String) */ private Map<Serializable, Object> contents = new WeakHashMap<Serializable, Object>(); private static Logger log = LoggerFactory.getLogger(Level1Cache.class); /** * Initializes the Level 1 cache for this thread. * This method should be invoked at the beginning of processing this request. */ public static void setCacheInstance() { threadLocal.set(new Level1Cache()); } /** * Clears the Level1Cache associated to this thread * This method should be invoked at the end of processing this request. */ public static void clearCacheInstance() { threadLocal.remove(); } /** * Return the Level1Cache associated to this thread. * @return the Level1Cache associated to this thread. May be null. */ public static Level1Cache getCacheInstance() { return threadLocal.get(); } @SuppressWarnings("unchecked") public <T> T get(Serializable key) { T value = (T) contents.get(key); if (log.isDebugEnabled() && value != null) { log.debug("Level 1 cache hit: " + key); } return value; } public void put(Serializable key, Object instance) { contents.put(key, instance); } public void delete(Serializable key) { contents.remove(key); } public void delete(Collection<? extends Serializable> keys) { for (Serializable key : keys) { contents.remove(key); } if (log.isDebugEnabled()) { log.debug("Deleted from Level 1 cache: " + keys); } } @SuppressWarnings("unchecked") public <T> Map<Serializable, T> get(Collection<? extends Serializable> keys) { Map<Serializable, T> result = Maps.newHashMapWithExpectedSize(keys.size()); for (Serializable key : keys) { T value = (T) contents.get(key); if (value != null) { result.put(key, value); } } if (log.isDebugEnabled() && !result.isEmpty()) { log.debug("Level 1 cache multiple hit: " + result.keySet()); } return result; } public <T> void put(Collection<? extends Serializable> keys, Collection<T> javaObjects) { Iterator<? extends Serializable> itKey = keys.iterator(); Iterator<T> itJava = javaObjects.iterator(); while (itKey.hasNext()) { contents.put(itKey.next(), itJava.next()); } } }
src/main/java/org/simpleds/cache/Level1Cache.java
package org.simpleds.cache; import java.io.Serializable; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.WeakHashMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Maps; /** * The thread-bound Level 1 cache container. * This cache will store Key-entity pair values or String-Query data (be it a query count or a list of returning Key values). * @author Nacho * */ class Level1Cache { /** the thread-bound instance */ private static ThreadLocal<Level1Cache> threadLocal = new ThreadLocal<Level1Cache>(); /** the cache contents (the key can be a Key or a String) */ private Map<Serializable, Object> contents = new WeakHashMap<Serializable, Object>(); private static Logger log = LoggerFactory.getLogger(Level1Cache.class); /** * Initializes the Level 1 cache for this thread. * This method should be invoked at the beginning of processing this request. */ public static void setCacheInstance() { threadLocal.set(new Level1Cache()); } /** * Clears the Level1Cache associated to this thread * This method should be invoked at the end of processing this request. */ public static void clearCacheInstance() { threadLocal.remove(); } /** * Return the Level1Cache associated to this thread. * @return the Level1Cache associated to this thread. May be null. */ public static Level1Cache getCacheInstance() { return threadLocal.get(); } @SuppressWarnings("unchecked") public <T> T get(Serializable key) { T value = (T) contents.get(key); if (log.isDebugEnabled() && value != null) { log.debug("Level 1 cache hit: " + key); } return value; } public void put(Serializable key, Object instance) { contents.put(key, instance); } public void delete(Serializable key) { contents.remove(key); } public void delete(Collection<? extends Serializable> keys) { for (Serializable key : keys) { contents.remove(key); } if (log.isDebugEnabled()) { log.debug("Deleted from Level 1 cache: " + keys); } } @SuppressWarnings("unchecked") public <T> Map<Serializable, T> get(Collection<? extends Serializable> keys) { Map<Serializable, T> result = Maps.newHashMapWithExpectedSize(keys.size()); for (Serializable key : keys) { T value = (T) contents.get(key); if (value != null) { result.put(key, value); } } if (log.isDebugEnabled() && !result.isEmpty()) { log.debug("Level 1 cache multiple hit: " + result.keySet()); } return result; } public <T> void put(Collection<? extends Serializable> keys, Collection<T> javaObjects) { Iterator<? extends Serializable> itKey = keys.iterator(); Iterator<T> itJava = javaObjects.iterator(); while (itKey.hasNext()) { contents.put(itKey.next(), itJava.next()); } } }
Level1Cache made public to make third parties caching behavior testable
src/main/java/org/simpleds/cache/Level1Cache.java
Level1Cache made public to make third parties caching behavior testable
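Since the commit above makes Level1Cache public, third-party code can now exercise the thread-bound cache directly in its own tests. A minimal usage sketch against the API shown in the record; the demo class name is illustrative.

import org.simpleds.cache.Level1Cache;

public class Level1CacheUsageDemo {
    public static void main(String[] args) {
        Level1Cache.setCacheInstance();            // bind a fresh cache to this thread
        try {
            Level1Cache cache = Level1Cache.getCacheInstance();
            cache.put("some-key", "some-value");
            Object hit = cache.get("some-key");    // returns "some-value" from the Level 1 cache
            System.out.println("cache hit: " + hit);
        } finally {
            Level1Cache.clearCacheInstance();      // always unbind at the end of the request
        }
    }
}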
Java
apache-2.0
e5b1eaac510947455b769f457b786500c74c7144
0
diydyq/velocity-engine,diydyq/velocity-engine,pcollaog/velocity-engine,pcollaog/velocity-engine
package org.apache.velocity.test; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.Iterator; import org.apache.velocity.VelocityContext; import org.apache.velocity.exception.VelocityException; import org.apache.velocity.runtime.RuntimeConstants; /** * This class tests support for strict foreach mode. */ public class StrictForeachTestCase extends BaseEvalTestCase { public StrictForeachTestCase(String name) { super(name); } public void setUp() throws Exception { super.setUp(); engine.setProperty(RuntimeConstants.SKIP_INVALID_ITERATOR, Boolean.FALSE); context.put("good", new GoodIterable()); context.put("bad", new BadIterable()); context.put("ugly", new UglyIterable()); } public void testGood() { try { evaluate("#foreach( $i in $good )$i#end"); } catch (VelocityException ve) { fail("Doing #foreach on $good should not have exploded!"); } } public void testBad() { try { evaluate("#foreach( $i in $bad )$i#end"); fail("Doing #foreach on $bad should have exploded!"); } catch (VelocityException ve) { // success! } } public void testUgly() { try { evaluate("#foreach( $i in $ugly )$i#end"); fail("Doing #foreach on $ugly should have exploded!"); } catch (VelocityException ve) { // success! } } public static class GoodIterable { public Iterator iterator() { return new ArrayList().iterator(); } } public static class BadIterable { public Object iterator() { return new Object(); } } public static class UglyIterable { public Iterator iterator() { return null; } } }
src/test/org/apache/velocity/test/StrictForeachTestCase.java
package org.apache.velocity.test; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.Collections; import java.util.Iterator; import org.apache.velocity.VelocityContext; import org.apache.velocity.exception.VelocityException; import org.apache.velocity.runtime.RuntimeConstants; /** * This class tests support for strict foreach mode. */ public class StrictForeachTestCase extends BaseEvalTestCase { public StrictForeachTestCase(String name) { super(name); } public void setUp() throws Exception { super.setUp(); engine.setProperty(RuntimeConstants.SKIP_INVALID_ITERATOR, Boolean.FALSE); context.put("good", new GoodIterable()); context.put("bad", new BadIterable()); context.put("ugly", new UglyIterable()); } public void testGood() { try { evaluate("#foreach( $i in $good )$i#end"); } catch (VelocityException ve) { fail("Doing #foreach on $good should not have exploded!"); } } public void testBad() { try { evaluate("#foreach( $i in $bad )$i#end"); fail("Doing #foreach on $bad should have exploded!"); } catch (VelocityException ve) { // success! } } public void testUgly() { try { evaluate("#foreach( $i in $ugly )$i#end"); fail("Doing #foreach on $ugly should have exploded!"); } catch (VelocityException ve) { // success! } } public static class GoodIterable { public Iterator iterator() { return Collections.emptyList().iterator(); } } public static class BadIterable { public Object iterator() { return new Object(); } } public static class UglyIterable { public Iterator iterator() { return null; } } }
remove another java 1.5-ism that snuck in
src/test/org/apache/velocity/test/StrictForeachTestCase.java
remove another java 1.5-ism that snuck in
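The "java 1.5-ism" removed above is Collections.emptyList(), which only exists since Java 5; the raw new ArrayList() keeps the test compilable on the older language level the project still targeted. A small side-by-side sketch of the two spellings (illustrative only; raw types are used deliberately to match the pre-1.5 style):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;

public class EmptyIteratorSpellings {
    public static void main(String[] args) {
        Iterator pre15Style = new ArrayList().iterator();            // what the commit switches to
        Iterator java5Style = Collections.emptyList().iterator();    // the Java 5-only spelling it removes
        System.out.println(pre15Style.hasNext() + " " + java5Style.hasNext());  // false false
    }
}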
Java
apache-2.0
1d01008ae0dce81e90eaaed66fc798cf01c110fb
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.packaging; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.intellij.execution.ExecutionException; import com.intellij.execution.configurations.PathEnvironmentVariableUtil; import com.intellij.execution.process.ProcessOutput; import com.intellij.openapi.components.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.StandardFileSystems; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.SystemProperties; import com.intellij.util.xmlb.XmlSerializerUtil; import com.jetbrains.python.PythonHelpersLocator; import com.jetbrains.python.sdk.PythonSdkType; import com.jetbrains.python.sdk.flavors.CondaEnvSdkFlavor; import com.jetbrains.python.sdk.flavors.PyCondaRunKt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.SystemDependent; import java.io.File; import java.util.*; @State(name = "PyCondaPackageService", storages = @Storage(value="conda_packages.xml", roamingType = RoamingType.DISABLED)) public class PyCondaPackageService implements PersistentStateComponent<PyCondaPackageService> { private static final Logger LOG = Logger.getInstance(PyCondaPackageService.class); private static final String CONDA_ENVS_DIR = "envs"; @Nullable @SystemDependent public String PREFERRED_CONDA_PATH = null; @Override public PyCondaPackageService getState() { return this; } @Override public void loadState(@NotNull PyCondaPackageService state) { XmlSerializerUtil.copyBean(state, this); } public static PyCondaPackageService getInstance() { return ServiceManager.getService(PyCondaPackageService.class); } @Nullable public static String getCondaPython() { final String conda = getSystemCondaExecutable(); if (conda != null) { final String python = getCondaBasePython(conda); if (python != null) return python; } return getCondaExecutableByName(getPythonName()); } @Nullable public static String getCondaBasePython(@NotNull String systemCondaExecutable) { final VirtualFile condaFile = LocalFileSystem.getInstance().findFileByPath(systemCondaExecutable); if (condaFile != null) { final VirtualFile condaDir = SystemInfo.isWindows ? condaFile.getParent().getParent() : condaFile.getParent(); final VirtualFile python = condaDir.findChild(getPythonName()); if (python != null) { return python.getPath(); } } return null; } @NotNull private static String getPythonName() { return SystemInfo.isWindows ? "python.exe" : "python"; } @Nullable public static String getSystemCondaExecutable() { final String condaName = SystemInfo.isWindows ? 
"conda.exe" : "conda"; final File condaInPath = PathEnvironmentVariableUtil.findInPath(condaName); if (condaInPath != null) return condaInPath.getPath(); return getCondaExecutableByName(condaName); } @Nullable public static String getCondaExecutable(@Nullable String sdkPath) { if (sdkPath != null) { String condaPath = findCondaExecutableRelativeToEnv(sdkPath); if (condaPath != null) return condaPath; } if (StringUtil.isNotEmpty(getInstance().PREFERRED_CONDA_PATH)) { return getInstance().PREFERRED_CONDA_PATH; } return getSystemCondaExecutable(); } @Nullable private static String findCondaExecutableRelativeToEnv(@NotNull String sdkPath) { final VirtualFile pyExecutable = StandardFileSystems.local().findFileByPath(sdkPath); if (pyExecutable == null) { return null; } final VirtualFile pyExecutableDir = pyExecutable.getParent(); final boolean isBaseConda = pyExecutableDir.findChild(CONDA_ENVS_DIR) != null; final String condaName; final VirtualFile condaFolder; if (SystemInfo.isWindows) { condaName = "conda.exe"; // On Windows python.exe is directly inside base interpreter/environment directory. // On other systems executable normally resides in "bin" subdirectory. condaFolder = pyExecutableDir; } else { condaName = "conda"; condaFolder = pyExecutableDir.getParent(); } // XXX Do we still need to support this? When did they drop per-environment conda executable? final String localCondaName = SystemInfo.isWindows && !isBaseConda ? "conda.bat" : condaName; final String immediateConda = findExecutable(localCondaName, condaFolder); if (immediateConda != null) { return immediateConda; } final VirtualFile envsDir = condaFolder.getParent(); if (!isBaseConda && envsDir != null && envsDir.getName().equals(CONDA_ENVS_DIR)) { return findExecutable(condaName, envsDir.getParent()); } return null; } @Nullable private static String getCondaExecutableByName(@NotNull final String condaName) { final VirtualFile userHome = LocalFileSystem.getInstance().findFileByPath(SystemProperties.getUserHome().replace('\\', '/')); if (userHome != null) { for (String root : CondaEnvSdkFlavor.CONDA_DEFAULT_ROOTS) { VirtualFile condaFolder = userHome.findChild(root); String executableFile = findExecutable(condaName, condaFolder); if (executableFile != null) return executableFile; if (SystemInfo.isWindows) { final VirtualFile appData = userHome.findFileByRelativePath("AppData\\Local\\Continuum\\" + root); executableFile = findExecutable(condaName, appData); if (executableFile != null) return executableFile; condaFolder = LocalFileSystem.getInstance().findFileByPath("C:\\" + root); executableFile = findExecutable(condaName, condaFolder); if (executableFile != null) return executableFile; } } } if (!SystemInfo.isWindows) { final VirtualFile systemCondaFolder = LocalFileSystem.getInstance().findFileByPath("/opt/anaconda"); final String executableFile = findExecutable(condaName, systemCondaFolder); if (executableFile != null) return executableFile; } return null; } @Nullable private static String findExecutable(String condaName, @Nullable final VirtualFile condaFolder) { if (condaFolder != null) { final VirtualFile binFolder = condaFolder.findChild(SystemInfo.isWindows ? 
"Scripts" : "bin"); if (binFolder != null) { final VirtualFile bin = binFolder.findChild(condaName); if (bin != null) { String directoryPath = bin.getPath(); final String executableFile = PythonSdkType.getExecutablePath(directoryPath, condaName); if (executableFile != null) { return executableFile; } } } } return null; } @Nullable public Multimap<String, String> listAllPackagesAndVersions() { try { final String output = runCondaPackagingHelper("listall"); final Multimap<String, String> nameToVersions = Multimaps.newSortedSetMultimap(new HashMap<>(), () -> new TreeSet<>(PyPackageVersionComparator.getSTR_COMPARATOR().reversed())); for (String line : StringUtil.split(output, "\n")) { final List<String> split = StringUtil.split(line, "\t"); if (split.size() < 2) continue; nameToVersions.put(split.get(0), split.get(1)); } return nameToVersions; } catch (ExecutionException e) { LOG.warn("Failed to get list of conda packages. " + e); return null; } } @NotNull public List<String> listPackageVersions(@NotNull String packageName) throws ExecutionException { final String output = runCondaPackagingHelper("versions", packageName); return StringUtil.split(output, "\n"); } @Nullable public List<String> listChannels() throws ExecutionException { final String output = runCondaPackagingHelper("channels"); return StringUtil.split(output, "\n"); } @NotNull private static String runCondaPackagingHelper(@NotNull String... args) throws ExecutionException { final List<String> commandArgs = new ArrayList<>(); commandArgs.add(PythonHelpersLocator.getHelperPath("conda_packaging_tool.py")); commandArgs.addAll(Arrays.asList(args)); final String condaPython = getCondaPython(); if (condaPython == null) { throw new PyExecutionException("Cannot find Python executable for conda", "python", commandArgs, new ProcessOutput()); } final ProcessOutput output = PyCondaRunKt.runCondaPython(condaPython, commandArgs); return output.getStdout(); } }
python/src/com/jetbrains/python/packaging/PyCondaPackageService.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.packaging; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.intellij.execution.ExecutionException; import com.intellij.execution.configurations.PathEnvironmentVariableUtil; import com.intellij.execution.process.ProcessOutput; import com.intellij.openapi.components.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.StandardFileSystems; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.SystemProperties; import com.intellij.util.xmlb.XmlSerializerUtil; import com.jetbrains.python.PythonHelpersLocator; import com.jetbrains.python.sdk.PythonSdkType; import com.jetbrains.python.sdk.flavors.CondaEnvSdkFlavor; import com.jetbrains.python.sdk.flavors.PyCondaRunKt; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.SystemDependent; import java.io.File; import java.util.*; @State(name = "PyCondaPackageService", storages = @Storage(value="conda_packages.xml", roamingType = RoamingType.DISABLED)) public class PyCondaPackageService implements PersistentStateComponent<PyCondaPackageService> { private static final Logger LOG = Logger.getInstance(PyCondaPackageService.class); @Nullable @SystemDependent public String PREFERRED_CONDA_PATH = null; @Override public PyCondaPackageService getState() { return this; } @Override public void loadState(@NotNull PyCondaPackageService state) { XmlSerializerUtil.copyBean(state, this); } public static PyCondaPackageService getInstance() { return ServiceManager.getService(PyCondaPackageService.class); } @Nullable public static String getCondaPython() { final String conda = getSystemCondaExecutable(); if (conda != null) { final String python = getCondaBasePython(conda); if (python != null) return python; } return getCondaExecutableByName(getPythonName()); } @Nullable public static String getCondaBasePython(@NotNull String systemCondaExecutable) { final VirtualFile condaFile = LocalFileSystem.getInstance().findFileByPath(systemCondaExecutable); if (condaFile != null) { final VirtualFile condaDir = SystemInfo.isWindows ? condaFile.getParent().getParent() : condaFile.getParent(); final VirtualFile python = condaDir.findChild(getPythonName()); if (python != null) { return python.getPath(); } } return null; } @NotNull private static String getPythonName() { return SystemInfo.isWindows ? "python.exe" : "python"; } @Nullable public static String getSystemCondaExecutable() { final String condaName = SystemInfo.isWindows ? 
"conda.exe" : "conda"; final File condaInPath = PathEnvironmentVariableUtil.findInPath(condaName); if (condaInPath != null) return condaInPath.getPath(); return getCondaExecutableByName(condaName); } @Nullable public static String getCondaExecutable(@Nullable String sdkPath) { if (sdkPath != null) { String condaPath = findCondaExecutableRelativeToEnv(sdkPath); if (condaPath != null) return condaPath; } if (StringUtil.isNotEmpty(getInstance().PREFERRED_CONDA_PATH)) { return getInstance().PREFERRED_CONDA_PATH; } return getSystemCondaExecutable(); } private static String findCondaExecutableRelativeToEnv(@NotNull String sdkPath) { final VirtualFile pyExecutable = StandardFileSystems.local().findFileByPath(sdkPath); if (pyExecutable == null) { return null; } final VirtualFile pyExecutableDir = pyExecutable.getParent(); String condaName = "conda"; if (SystemInfo.isWindows) { condaName = pyExecutableDir.findChild("envs") != null ? "conda.exe" : "conda.bat"; } final VirtualFile conda = pyExecutableDir.findChild(condaName); if (conda != null) return conda.getPath(); // On Windows python.exe is directly inside base interpreter/environment directory. // On other systems executable normally resides in "bin" subdirectory. final VirtualFile condaFolder = SystemInfo.isWindows ? pyExecutableDir : pyExecutableDir.getParent(); return findExecutable(condaName, condaFolder); } @Nullable private static String getCondaExecutableByName(@NotNull final String condaName) { final VirtualFile userHome = LocalFileSystem.getInstance().findFileByPath(SystemProperties.getUserHome().replace('\\', '/')); if (userHome != null) { for (String root : CondaEnvSdkFlavor.CONDA_DEFAULT_ROOTS) { VirtualFile condaFolder = userHome.findChild(root); String executableFile = findExecutable(condaName, condaFolder); if (executableFile != null) return executableFile; if (SystemInfo.isWindows) { final VirtualFile appData = userHome.findFileByRelativePath("AppData\\Local\\Continuum\\" + root); executableFile = findExecutable(condaName, appData); if (executableFile != null) return executableFile; condaFolder = LocalFileSystem.getInstance().findFileByPath("C:\\" + root); executableFile = findExecutable(condaName, condaFolder); if (executableFile != null) return executableFile; } } } if (!SystemInfo.isWindows) { final VirtualFile systemCondaFolder = LocalFileSystem.getInstance().findFileByPath("/opt/anaconda"); final String executableFile = findExecutable(condaName, systemCondaFolder); if (executableFile != null) return executableFile; } return null; } @Nullable private static String findExecutable(String condaName, @Nullable final VirtualFile condaFolder) { if (condaFolder != null) { final VirtualFile binFolder = condaFolder.findChild(SystemInfo.isWindows ? 
"Scripts" : "bin"); if (binFolder != null) { final VirtualFile bin = binFolder.findChild(condaName); if (bin != null) { String directoryPath = bin.getPath(); final String executableFile = PythonSdkType.getExecutablePath(directoryPath, condaName); if (executableFile != null) { return executableFile; } } } } return null; } @Nullable public Multimap<String, String> listAllPackagesAndVersions() { try { final String output = runCondaPackagingHelper("listall"); final Multimap<String, String> nameToVersions = Multimaps.newSortedSetMultimap(new HashMap<>(), () -> new TreeSet<>(PyPackageVersionComparator.getSTR_COMPARATOR().reversed())); for (String line : StringUtil.split(output, "\n")) { final List<String> split = StringUtil.split(line, "\t"); if (split.size() < 2) continue; nameToVersions.put(split.get(0), split.get(1)); } return nameToVersions; } catch (ExecutionException e) { LOG.warn("Failed to get list of conda packages. " + e); return null; } } @NotNull public List<String> listPackageVersions(@NotNull String packageName) throws ExecutionException { final String output = runCondaPackagingHelper("versions", packageName); return StringUtil.split(output, "\n"); } @Nullable public List<String> listChannels() throws ExecutionException { final String output = runCondaPackagingHelper("channels"); return StringUtil.split(output, "\n"); } @NotNull private static String runCondaPackagingHelper(@NotNull String... args) throws ExecutionException { final List<String> commandArgs = new ArrayList<>(); commandArgs.add(PythonHelpersLocator.getHelperPath("conda_packaging_tool.py")); commandArgs.addAll(Arrays.asList(args)); final String condaPython = getCondaPython(); if (condaPython == null) { throw new PyExecutionException("Cannot find Python executable for conda", "python", commandArgs, new ProcessOutput()); } final ProcessOutput output = PyCondaRunKt.runCondaPython(condaPython, commandArgs); return output.getStdout(); } }
PY-35141 Discover base conda executable for environments in its "envs" directory, in case base conda was installed in a non-standard location. It's still not clear how to do this for environments created in an arbitrary directory (the "--prefix" option of "conda create"). GitOrigin-RevId: 744698634cc11f55ef3970d725c8183c890fc7ed
python/src/com/jetbrains/python/packaging/PyCondaPackageService.java
PY-35141 Discover base conda executable for environments in its "envs" directory
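The change above resolves the conda executable relative to a Python interpreter path, looking first inside the environment itself and then walking up through a base installation's "envs" directory. Below is a minimal stand-alone sketch of that lookup order using plain java.nio.file instead of the IntelliJ VirtualFile API; the class name CondaLookupSketch and its method names are illustrative assumptions, not part of the patch.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

/** Illustrative sketch only: mirrors the lookup order of findCondaExecutableRelativeToEnv with plain NIO. */
public final class CondaLookupSketch {

    private static final boolean IS_WINDOWS =
            System.getProperty("os.name").toLowerCase().contains("win");

    /** Returns the conda executable for the environment owning the given python binary, or null. */
    public static Path findCondaRelativeToEnv(Path pythonExecutable) {
        // On Windows python.exe sits directly in the environment directory; elsewhere it is in "bin".
        Path parentDir = pythonExecutable.getParent();
        if (parentDir == null) {
            return null;
        }
        Path envDir = IS_WINDOWS ? parentDir : parentDir.getParent();
        if (envDir == null) {
            return null;
        }
        // 1) conda shipped with the environment itself (typical for a base installation).
        Path local = resolveExecutable(envDir);
        if (local != null) {
            return local;
        }
        // 2) environment created under <base>/envs/<name>: walk up to the base installation.
        Path envsDir = envDir.getParent();
        if (envsDir != null && "envs".equals(String.valueOf(envsDir.getFileName()))) {
            return resolveExecutable(envsDir.getParent());
        }
        return null;
    }

    /** Checks <root>/Scripts/conda.exe on Windows and <root>/bin/conda elsewhere. */
    private static Path resolveExecutable(Path root) {
        if (root == null) {
            return null;
        }
        Path candidate = IS_WINDOWS
                ? root.resolve("Scripts").resolve("conda.exe")
                : root.resolve("bin").resolve("conda");
        return Files.isExecutable(candidate) ? candidate : null;
    }

    public static void main(String[] args) {
        // Example: java CondaLookupSketch /home/user/miniconda3/envs/demo/bin/python
        System.out.println(findCondaRelativeToEnv(Paths.get(args[0])));
    }
}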
Java
bsd-3-clause
b6c8e12efe5a74c55c814b84315ceb38a28b925c
0
joansmith/basex,BaseXdb/basex,JensErat/basex,dimitarp/basex,vincentml/basex,ksclarke/basex,deshmnnit04/basex,drmacro/basex
package org.basex.query.func.proc; import static org.basex.util.Token.*; import org.basex.query.*; import org.basex.query.util.Err.ErrType; import org.basex.query.value.item.*; import org.basex.util.*; /** * Function implementation. * * @author BaseX Team 2005-14, BSD License * @author Christian Gruen */ public final class ProcSystem extends ProcFn { @Override public Item item(final QueryContext qc, final InputInfo ii) throws QueryException { final Result result = exec(qc); if(result.code == 0) return Str.get(norm(result.output)); // create error message final QNm name = new QNm(ErrType.BXPR + String.format("%04d", result.code)); throw new QueryException(info, name, string(norm(result.error))); } }
basex-core/src/main/java/org/basex/query/func/proc/ProcSystem.java
package org.basex.query.func.proc; import static org.basex.util.Token.*; import org.basex.query.*; import org.basex.query.value.item.*; import org.basex.util.*; /** * Function implementation. * * @author BaseX Team 2005-14, BSD License * @author Christian Gruen */ public final class ProcSystem extends ProcFn { @Override public Item item(final QueryContext qc, final InputInfo ii) throws QueryException { final Result result = exec(qc); if(result.code == 0) return Str.get(norm(result.output)); // create error message final QNm name = new QNm("PROC" + String.format("%04d", result.code)); throw new QueryException(info, name, string(norm(result.error))); } }
[FIX] XQuery, Process Module: PROC.... → BXPR....
basex-core/src/main/java/org/basex/query/func/proc/ProcSystem.java
[FIX] XQuery, Process Module: PROC.... → BXPR....
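The fix above replaces the hard-coded "PROC" prefix with the ErrType.BXPR constant when building the error QName for a failed external process. A small self-contained sketch of that code construction follows, with a stand-in enum instead of BaseX's ErrType and QNm types; the names ProcErrorCodeSketch and errorCode are illustrative only.

/** Illustrative sketch only: shows how a process exit code becomes a prefixed, zero-padded error code. */
public final class ProcErrorCodeSketch {

    /** Stand-in for BaseX's ErrType constants; only the prefix string matters here. */
    enum ErrType { BXPR }

    /** Builds e.g. "BXPR0004" for exit code 4, matching String.format("%04d", result.code) in the patch. */
    static String errorCode(ErrType type, int exitCode) {
        return type.name() + String.format("%04d", exitCode);
    }

    public static void main(String[] args) {
        System.out.println(errorCode(ErrType.BXPR, 4));   // BXPR0004
        System.out.println(errorCode(ErrType.BXPR, 127)); // BXPR0127
    }
}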
Java
bsd-3-clause
60035d23acbdbd23dd7ddce76980bea404d1b5ac
0
lutece-platform/lutece-core,rzara/lutece-core
/* * Copyright (c) 2002-2009, Mairie de Paris * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright notice * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice * and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * License 1.0 */ package fr.paris.lutece.portal.business.style; import java.util.ArrayList; import java.util.Collection; import fr.paris.lutece.util.ReferenceList; import fr.paris.lutece.util.sql.DAOUtil; /** * This class provides Data Access methods for Theme objects */ public final class ThemeDAO implements IThemeDAO { private static final String SQL_QUERY_SELECT = " SELECT code_theme, theme_description, path_images, path_css, theme_author, " + " theme_author_url, theme_version, theme_licence FROM core_theme WHERE code_theme = ?"; private static final String SQL_QUERY_INSERT = " INSERT INTO core_theme ( code_theme, theme_description, path_images, path_css," + " theme_author, theme_author_url, theme_version, theme_licence ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? )"; private static final String SQL_QUERY_DELETE = " DELETE FROM core_theme WHERE code_theme = ?"; private static final String SQL_QUERY_UPDATE = " UPDATE core_theme SET theme_description = ?, path_images = ?, " + " path_css = ? , theme_author = ?, theme_author_url = ?, theme_version = ?, " + " theme_licence = ? 
WHERE code_theme = ?"; private static final String SQL_QUERY_SELECTALL = " SELECT code_theme, theme_description, path_images, path_css, theme_author, " + " theme_author_url, theme_version, theme_licence FROM core_theme ORDER BY code_theme"; private static final String SQL_QUERY_SELECT_THEME = " SELECT code_theme, theme_description FROM core_theme"; /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#insert(Theme) */ public synchronized void insert( Theme theme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_INSERT ); daoUtil.setString( 1, theme.getCodeTheme( ) ); daoUtil.setString( 2, theme.getThemeDescription( ) ); daoUtil.setString( 3, theme.getPathImages( ) ); daoUtil.setString( 4, theme.getPathCss( ) ); daoUtil.setString( 5, theme.getThemeAuthor( ) ); daoUtil.setString( 6, theme.getThemeAuthorUrl( ) ); daoUtil.setString( 7, theme.getThemeVersion( ) ); daoUtil.setString( 8, theme.getThemeLicence( ) ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#load(String) */ public Theme load( String strCodeTheme ) { Theme theme = null; DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECT ); daoUtil.setString( 1, strCodeTheme ); daoUtil.executeQuery( ); if( daoUtil.next( ) ) { theme = new Theme( ); theme.setCodeTheme( daoUtil.getString( 1 ) ); theme.setThemeDescription( daoUtil.getString( 2 ) ); theme.setPathImages( daoUtil.getString( 3 ) ); theme.setPathCss( daoUtil.getString( 4 ) ); theme.setThemeAuthor( daoUtil.getString( 5 ) ); theme.setThemeAuthorUrl( daoUtil.getString( 6 ) ); theme.setThemeVersion( daoUtil.getString( 7 ) ); theme.setThemeLicence( daoUtil.getString( 8 ) ); } daoUtil.free( ); return theme; } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#delete(String) */ public void delete( String strCodeTheme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_DELETE ); daoUtil.setString( 1, strCodeTheme ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#store(fr.paris.lutece.portal.business.style.Theme) */ public void store( Theme theme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_UPDATE ); daoUtil.setString( 1, theme.getThemeDescription( ) ); daoUtil.setString( 2, theme.getPathImages( ) ); daoUtil.setString( 3, theme.getPathCss( ) ); daoUtil.setString( 4, theme.getThemeAuthor( ) ); daoUtil.setString( 5, theme.getThemeAuthorUrl( ) ); daoUtil.setString( 6, theme.getThemeVersion( ) ); daoUtil.setString( 7, theme.getThemeLicence( ) ); daoUtil.setString( 8, theme.getCodeTheme( ) ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#selectThemesList() */ public Collection<Theme> selectThemesList( ) { Collection<Theme> themeList = new ArrayList<Theme>( ); DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECTALL ); daoUtil.executeQuery( ); while( daoUtil.next( ) ) { Theme theme = new Theme( ); theme.setCodeTheme( daoUtil.getString( 1 ) ); theme.setThemeDescription( daoUtil.getString( 2 ) ); theme.setPathImages( daoUtil.getString( 3 ) ); theme.setPathCss( daoUtil.getString( 4 ) ); theme.setThemeAuthor( daoUtil.getString( 5 ) ); theme.setThemeAuthorUrl( daoUtil.getString( 6 ) ); theme.setThemeVersion( daoUtil.getString( 7 ) ); theme.setThemeLicence( daoUtil.getString( 8 ) ); themeList.add( theme ); } daoUtil.free( ); return themeList; } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#getThemesList() */ public ReferenceList 
getThemesList( ) { ReferenceList themesList = new ReferenceList( ); DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECT_THEME ); daoUtil.executeQuery( ); while( daoUtil.next( ) ) { themesList.addItem( daoUtil.getString( 1 ), daoUtil.getString( 2 ) ); } daoUtil.free( ); return themesList; } }
src/java/fr/paris/lutece/portal/business/style/ThemeDAO.java
/* * Copyright (c) 2002-2009, Mairie de Paris * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright notice * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice * and the following disclaimer in the documentation and/or other materials * provided with the distribution. * * 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * License 1.0 */ package fr.paris.lutece.portal.business.style; import java.util.ArrayList; import java.util.Collection; import fr.paris.lutece.util.ReferenceList; import fr.paris.lutece.util.sql.DAOUtil; /** * This class provides Data Access methods for Theme objects */ public final class ThemeDAO implements IThemeDAO { private static final String SQL_QUERY_SELECT = " SELECT code_theme, theme_description, path_images, path_css, theme_author, " + " theme_author_url, theme_version, theme_licence FROM core_theme WHERE code_theme = ?"; private static final String SQL_QUERY_INSERT = " INSERT INTO core_theme ( code_theme, theme_description, path_images, path_css," + " theme_author, theme_author_url, theme_version, theme_licence ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ? )"; private static final String SQL_QUERY_DELETE = " DELETE FROM core_theme WHERE code_theme = ?"; private static final String SQL_QUERY_UPDATE = " UPDATE core_theme SET theme_description = ?, path_images = ?, " + " path_css = ? , theme_author = ?, theme_author_url = ?, theme_version = ?, " + " theme_licence = ? 
WHERE code_theme = ?"; private static final String SQL_QUERY_SELECTALL = " SELECT code_theme, theme_description, path_images, path_css, theme_author, " + " theme_author_url, theme_version, theme_licence FROM core_theme ORDER BY code_theme"; private static final String SQL_QUERY_SELECT_THEME = " SELECT id_theme , description_theme FROM core_theme"; /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#insert(Theme) */ public synchronized void insert( Theme theme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_INSERT ); daoUtil.setString( 1, theme.getCodeTheme( ) ); daoUtil.setString( 2, theme.getThemeDescription( ) ); daoUtil.setString( 3, theme.getPathImages( ) ); daoUtil.setString( 4, theme.getPathCss( ) ); daoUtil.setString( 5, theme.getThemeAuthor( ) ); daoUtil.setString( 6, theme.getThemeAuthorUrl( ) ); daoUtil.setString( 7, theme.getThemeVersion( ) ); daoUtil.setString( 8, theme.getThemeLicence( ) ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#load(String) */ public Theme load( String strCodeTheme ) { Theme theme = null; DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECT ); daoUtil.setString( 1, strCodeTheme ); daoUtil.executeQuery( ); if( daoUtil.next( ) ) { theme = new Theme( ); theme.setCodeTheme( daoUtil.getString( 1 ) ); theme.setThemeDescription( daoUtil.getString( 2 ) ); theme.setPathImages( daoUtil.getString( 3 ) ); theme.setPathCss( daoUtil.getString( 4 ) ); theme.setThemeAuthor( daoUtil.getString( 5 ) ); theme.setThemeAuthorUrl( daoUtil.getString( 6 ) ); theme.setThemeVersion( daoUtil.getString( 7 ) ); theme.setThemeLicence( daoUtil.getString( 8 ) ); } daoUtil.free( ); return theme; } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#delete(String) */ public void delete( String strCodeTheme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_DELETE ); daoUtil.setString( 1, strCodeTheme ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#store(fr.paris.lutece.portal.business.style.Theme) */ public void store( Theme theme ) { DAOUtil daoUtil = new DAOUtil( SQL_QUERY_UPDATE ); daoUtil.setString( 1, theme.getThemeDescription( ) ); daoUtil.setString( 2, theme.getPathImages( ) ); daoUtil.setString( 3, theme.getPathCss( ) ); daoUtil.setString( 4, theme.getThemeAuthor( ) ); daoUtil.setString( 5, theme.getThemeAuthorUrl( ) ); daoUtil.setString( 6, theme.getThemeVersion( ) ); daoUtil.setString( 7, theme.getThemeLicence( ) ); daoUtil.setString( 8, theme.getCodeTheme( ) ); daoUtil.executeUpdate( ); daoUtil.free( ); } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#selectThemesList() */ public Collection<Theme> selectThemesList( ) { Collection<Theme> themeList = new ArrayList<Theme>( ); DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECTALL ); daoUtil.executeQuery( ); while( daoUtil.next( ) ) { Theme theme = new Theme( ); theme.setCodeTheme( daoUtil.getString( 1 ) ); theme.setThemeDescription( daoUtil.getString( 2 ) ); theme.setPathImages( daoUtil.getString( 3 ) ); theme.setPathCss( daoUtil.getString( 4 ) ); theme.setThemeAuthor( daoUtil.getString( 5 ) ); theme.setThemeAuthorUrl( daoUtil.getString( 6 ) ); theme.setThemeVersion( daoUtil.getString( 7 ) ); theme.setThemeLicence( daoUtil.getString( 8 ) ); themeList.add( theme ); } daoUtil.free( ); return themeList; } /* * (non-Javadoc) * * @see fr.paris.lutece.portal.business.style.IThemeDAO#getThemesList() */ public ReferenceList 
getThemesList( ) { ReferenceList themesList = new ReferenceList( ); DAOUtil daoUtil = new DAOUtil( SQL_QUERY_SELECT_THEME ); daoUtil.executeQuery( ); while( daoUtil.next( ) ) { themesList.addItem( daoUtil.getInt( 1 ), daoUtil.getString( 2 ) ); } daoUtil.free( ); return themesList; } }
LUTECE-1066 : fix incorrect column names in the theme reference list query git-svn-id: 890dd67775b5971c21efd90062c158582082fe1b@13797 bab10101-e421-0410-a517-8ce0973de3ef
src/java/fr/paris/lutece/portal/business/style/ThemeDAO.java
LUTECE-1066 : fix incorrect column names in the theme reference list query
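The fix above corrects the column names in SQL_QUERY_SELECT_THEME (id_theme/description_theme to code_theme/theme_description) and reads both values as strings. A minimal sketch of the corrected query in plain JDBC, assuming a caller-supplied Connection; ThemeListSketch and loadThemes are illustrative names, not part of the Lutece API.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedHashMap;
import java.util.Map;

/** Illustrative sketch only: the corrected theme query expressed with plain JDBC instead of Lutece's DAOUtil. */
public final class ThemeListSketch {

    // The patch renames the selected columns to the ones that actually exist in core_theme.
    private static final String SQL =
            "SELECT code_theme, theme_description FROM core_theme";

    /** Returns code_theme -> theme_description; the caller supplies and owns the Connection. */
    public static Map<String, String> loadThemes(Connection connection) throws SQLException {
        Map<String, String> themes = new LinkedHashMap<>();
        try (PreparedStatement statement = connection.prepareStatement(SQL);
             ResultSet resultSet = statement.executeQuery()) {
            while (resultSet.next()) {
                // Both columns are text, so both are read with getString (the bug fixed by the patch).
                themes.put(resultSet.getString("code_theme"),
                           resultSet.getString("theme_description"));
            }
        }
        return themes;
    }
}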
Java
bsd-3-clause
b1dbe420f68a79ef06178b3dffc8d4bafd22b8e6
0
ndexbio/ndex-rest
/** * Copyright (c) 2013, 2016, The Regents of the University of California, The Cytoscape Consortium * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ package org.ndexbio.rest.services; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.sql.SQLException; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import javax.annotation.security.PermitAll; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.DefaultValue; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Context; import javax.ws.rs.core.GenericType; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import org.apache.commons.io.IOUtils; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrDocumentList; import org.ndexbio.common.models.dao.postgresql.GroupDAO; import org.ndexbio.common.models.dao.postgresql.NetworkDAO; import org.ndexbio.common.models.dao.postgresql.UserDAO; import org.ndexbio.common.persistence.CXNetworkLoader; import org.ndexbio.common.solr.SingleNetworkSolrIdxManager; import org.ndexbio.common.util.NdexUUIDFactory; import org.ndexbio.model.errorcodes.NDExError; import org.ndexbio.model.exceptions.BadRequestException; import org.ndexbio.model.exceptions.ForbiddenOperationException; import org.ndexbio.model.exceptions.NdexException; import org.ndexbio.model.exceptions.ObjectNotFoundException; 
import org.ndexbio.model.exceptions.UnauthorizedOperationException; import org.ndexbio.model.network.query.EdgeCollectionQuery; import org.ndexbio.model.object.CXSimplePathQuery; import org.ndexbio.model.object.Group; import org.ndexbio.model.object.NetworkSearchResult; import org.ndexbio.model.object.SimpleNetworkQuery; import org.ndexbio.model.object.SimpleQuery; import org.ndexbio.model.object.SolrSearchResult; import org.ndexbio.model.object.User; import org.ndexbio.model.object.network.VisibilityType; import org.ndexbio.rest.Configuration; import org.ndexbio.rest.filters.BasicAuthenticationFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @Path("/v2/search") public class SearchServiceV2 extends NdexService { // private static final String GOOGLE_OAUTH_FLAG = "USE_GOOGLE_AUTHENTICATION"; // private static final String GOOGLE_OATH_KEY = "GOOGLE_OATH_KEY"; //static Logger logger = LoggerFactory.getLogger(BatchServiceV2.class); static Logger accLogger = LoggerFactory.getLogger(BasicAuthenticationFilter.accessLoggerName); static final int networkQuerySizeLimit = 500000; /************************************************************************** * Injects the HTTP request into the base class to be used by * getLoggedInUser(). * * @param httpRequest * The HTTP request injected by RESTEasy's context. **************************************************************************/ public SearchServiceV2(@Context HttpServletRequest httpRequest) { super(httpRequest); } /************************************************************************** * Finds users based on the search parameters. * * @param searchParameters * The search parameters. * @throws Exception **************************************************************************/ @POST @PermitAll @AuthenticationNotRequired @Path("/user") @Produces("application/json") public static SolrSearchResult<User> findUsers( SimpleQuery simpleUserQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize ) throws Exception { accLogger.info("[data]\t[query:" +simpleUserQuery.getSearchString() + "]" ); try (UserDAO dao = new UserDAO ()){ final SolrSearchResult<User> users = dao.findUsers(simpleUserQuery, skipBlocks, blockSize); return users; } } /************************************************************************** * Find Groups based on search parameters - string matching for now * * @params searchParameters The search parameters. * @return Groups that match the search criteria. 
* @throws SQLException * @throws NdexException * @throws SolrServerException * @throws IOException **************************************************************************/ @POST @PermitAll @AuthenticationNotRequired @Path("/group") @Produces("application/json") public static SolrSearchResult<Group> findGroups(SimpleQuery simpleQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws SQLException, IOException, SolrServerException, NdexException { // logger.info("[start: Search group \"{}\"]", simpleQuery.getSearchString()); accLogger.info("[data]\t[query:" +simpleQuery.getSearchString() + "]" ); try (GroupDAO dao = new GroupDAO()) { final SolrSearchResult<Group> groups = dao.findGroups(simpleQuery, skipBlocks, blockSize); // logger.info("[end: Search group \"{}\"]", simpleQuery.getSearchString()); return groups; } } @POST @PermitAll @Path("/network") @Produces("application/json") public NetworkSearchResult searchNetwork( final SimpleNetworkQuery query, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws IllegalArgumentException, NdexException { accLogger.info("[data]\t[acc:"+ query.getAccountName() + "]\t[query:" +query.getSearchString() + "]" ); if(query.getAccountName() != null) query.setAccountName(query.getAccountName().toLowerCase()); try (NetworkDAO dao = new NetworkDAO()) { NetworkSearchResult result = dao.findNetworks(query, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (NdexException e1) { throw e1; } catch (Exception e) { e.printStackTrace(); throw new NdexException(e.getMessage()); } } @PermitAll @POST @Path("/network/{networkId}/nodes") @Produces("application/json") public SolrDocumentList queryNetworkNodes( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("100") @QueryParam("limit") int limit, final SimpleQuery queryParameters ) throws NdexException, SQLException, SolrServerException, IOException { accLogger.info("[data]\t[query:" + queryParameters.getSearchString() + "]" ); UUID networkId = UUID.fromString(networkIdStr); try (NetworkDAO dao = new NetworkDAO()) { UUID userId = getLoggedInUserId(); if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } //checkIfQueryIsAllowed(networkId, dao); getSolrIdxReady(networkId, dao); } try (SingleNetworkSolrIdxManager solr = new SingleNetworkSolrIdxManager(networkId.toString())) { SolrDocumentList r = solr.getNodeIdsByQuery(queryParameters.getSearchString(), limit); return r; } /* Client client = ClientBuilder.newBuilder().build(); Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("depth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/query"); Response response = target.request().post(Entity.entity(queryEntity, "application/json")); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); return Response.ok().entity(in).build(); */ } 
@SuppressWarnings("resource") @PermitAll @POST @Path("/network/{networkId}/query") @Produces("application/json") public Response queryNetworkAsCX( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("false") @QueryParam("save") boolean saveAsNetwork, final CXSimplePathQuery queryParameters ) throws NdexException, SQLException, URISyntaxException, SolrServerException, IOException { accLogger.info("[data]\t[depth:"+ queryParameters.getSearchDepth() + "][query:" + queryParameters.getSearchString() + "]" ); if ( queryParameters.getSearchDepth() <1) { queryParameters.setSearchDepth(1); } UUID networkId = UUID.fromString(networkIdStr); UUID userId = getLoggedInUserId(); if ( saveAsNetwork) { if (userId == null) throw new BadRequestException("Only authenticated users can save query results."); try (UserDAO dao = new UserDAO()) { dao.checkDiskSpace(userId); } } String networkName; try (NetworkDAO dao = new NetworkDAO()) { if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } networkName = dao.getNetworkName(networkId); getSolrIdxReady(networkId, dao); } /* ProvenanceEntity ei = new ProvenanceEntity(); ei.setUri(Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ networkIdStr + "/summary" ); ei.addProperty("dc:title", networkName); */ if (networkName == null) networkName = "Neighborhood query result on unnamed network"; else networkName = "Neighborhood query result on network - " + networkName; Client client = ClientBuilder.newBuilder().build(); String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/query"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { NDExError obj = response.readEntity(NDExError.class); throw new NdexException(obj.getMessage()); } InputStream in = response.readEntity(InputStream.class); if (saveAsNetwork) { /* ProvenanceEntity entity = new ProvenanceEntity(); ProvenanceEvent evt = new ProvenanceEvent("Neighborhood query",new Timestamp(Calendar.getInstance().getTimeInMillis())); evt.addProperty("Query terms", queryParameters.getSearchString()); evt.addProperty("Query depth", String.valueOf(queryParameters.getSearchDepth())); evt.addProperty( "user name", this.getLoggedInUser().getUserName()); evt.addInput(ei); entity.setCreationEvent(evt); */ return saveQueryResult(networkName, userId, getLoggedInUser().getUserName(), in); } return Response.ok().entity(in).build(); } private static void getSolrIdxReady(UUID networkId, NetworkDAO dao) throws SQLException, ObjectNotFoundException, SolrServerException, IOException, NdexException { int nodeCount = dao.getNodeCount(networkId); try (SingleNetworkSolrIdxManager solr = new SingleNetworkSolrIdxManager(networkId.toString())) { boolean ready = solr.isReady(nodeCount < SingleNetworkSolrIdxManager.AUTOCREATE_THRESHHOLD); if ( !ready ) { if (nodeCount < SingleNetworkSolrIdxManager.AUTOCREATE_THRESHHOLD) throw new NdexException ("Failed to create Solr Index on this network."); throw new NdexException("NDEx server hasn't finished creating index on this network yet. 
Please try again later"); } } } private Response saveQueryResult(String networkName, UUID ownerUUID,String ownerName, InputStream in) throws SQLException, URISyntaxException { // create a network entry in db UUID uuid = NdexUUIDFactory.INSTANCE.createNewNDExUUID(); try (NetworkDAO dao = new NetworkDAO()) { dao.CreateEmptyNetworkEntry(uuid, ownerUUID, ownerName, 0,networkName, null); // dao.setProvenance(uuid, entity); dao.commit(); } // start the saving thread. NetworkStreamSaverThread worker = new NetworkStreamSaverThread(uuid, in); worker.start(); // return the URL as resource String urlStr = Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ uuid; URI l = new URI (urlStr); return Response.created(l).entity(l).build(); } private class NetworkStreamSaverThread extends Thread { UUID networkUUID; InputStream input; public NetworkStreamSaverThread(UUID networkId, InputStream in) { this.networkUUID = networkId; this.input = in; // this.owner = ownerName; } @Override public void run() { String pathPrefix = Configuration.getInstance().getNdexRoot() + "/data/" + networkUUID.toString(); // Create dir java.nio.file.Path dir = Paths.get(pathPrefix); Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwxrwxr-x"); FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); try { Files.createDirectory(dir, attr); // write content to file File cxFile = new File(pathPrefix + "/network.cx"); java.nio.file.Files.copy(input, cxFile.toPath(), StandardCopyOption.REPLACE_EXISTING); long fileSize = cxFile.length(); try (NetworkDAO dao = new NetworkDAO()) { dao.setNetworkFileSize(networkUUID, fileSize); dao.commit(); } catch (SQLException | NdexException e2) { e2.printStackTrace(); try (NetworkDAO dao = new NetworkDAO()) { dao.setErrorMessage(networkUUID, "Failed to set network file size: " + e2.getMessage()); dao.unlockNetwork(networkUUID); } catch (SQLException e3) { e3.printStackTrace(); } } } catch (IOException e) { e.printStackTrace(); try (NetworkDAO dao = new NetworkDAO()) { dao.setErrorMessage(networkUUID, "Failed to create network file on the server: " + e.getMessage()); dao.unlockNetwork(networkUUID); } catch (SQLException e2) { e2.printStackTrace(); } } IOUtils.closeQuietly(input); try (NetworkDAO dao = new NetworkDAO ()) { try ( CXNetworkLoader loader = new CXNetworkLoader(networkUUID, false,dao, VisibilityType.PRIVATE, null, 5000) ) { loader.persistCXNetwork(); } catch ( IOException | NdexException | SQLException | RuntimeException | SolrServerException e1) { e1.printStackTrace(); try { dao.setErrorMessage(networkUUID, e1.getMessage()); dao.setFlag(networkUUID, "readonly", false); try { dao.updateNetworkVisibility(networkUUID, VisibilityType.PRIVATE, true); } catch (NdexException e) { System.out.print("Error when updating network visibility: " + e.getMessage()); e.printStackTrace(); } dao.unlockNetwork(networkUUID); } catch (SQLException e) { System.out.println("Failed to set Error for network " + networkUUID); e.printStackTrace(); } } } catch (SQLException e) { e.printStackTrace(); } } } @PermitAll @POST @Path("/network/{networkId}/interconnectquery") @Produces("application/json") public Response interconnectQuery( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("false") @QueryParam("save") boolean saveAsNetwork, final CXSimplePathQuery queryParameters ) throws NdexException, SQLException, URISyntaxException, SolrServerException, IOException 
{ accLogger.info("[data]\t[depth:"+ queryParameters.getSearchDepth() + "][query:" + queryParameters.getSearchString() + "]" ); if ( queryParameters.getSearchDepth() <1) { queryParameters.setSearchDepth(1); } UUID networkId = UUID.fromString(networkIdStr); UUID userId = getLoggedInUserId(); if ( saveAsNetwork) { if (userId == null) throw new BadRequestException("Only authenticated users can save query results."); try (UserDAO dao = new UserDAO()) { dao.checkDiskSpace(userId); } } String networkName; try (NetworkDAO dao = new NetworkDAO()) { if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } getSolrIdxReady(networkId, dao); networkName = dao.getNetworkName(networkId); } /* ProvenanceEntity ei = new ProvenanceEntity(); ei.setUri(Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ networkIdStr + "/summary" ); ei.addProperty("dc:title", networkName); */ if (networkName == null) networkName = "Interconnect query result on unnamed network"; else networkName = "Interconnect query result on network - " + networkName; Client client = ClientBuilder.newBuilder().build(); /*Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("searchDepth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); queryEntity */ String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/interconnectquery"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { NDExError obj = response.readEntity(NDExError.class); throw new NdexException(obj.getMessage()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); if (saveAsNetwork) { /* ProvenanceEntity entity = new ProvenanceEntity(); ProvenanceEvent evt = new ProvenanceEvent("Interconnect query",new Timestamp(Calendar.getInstance().getTimeInMillis())); evt.addProperty("Query terms", queryParameters.getSearchString()); evt.addProperty( "user name", this.getLoggedInUser().getUserName()); evt.addInput(ei); entity.setCreationEvent(evt); */ return saveQueryResult(networkName, userId, getLoggedInUser().getUserName(), in); } return Response.ok().entity(in).build(); } @PermitAll @POST @Path("/network/{networkId}/advancedquery") @Produces("application/json") public Response advancedQuery( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, final EdgeCollectionQuery queryParameters ) throws NdexException, SQLException { /* if ( networkIdStr.equals("0000")) return Response.ok().build();*/ ObjectMapper mapper = new ObjectMapper(); try { accLogger.info("[data]\t[query:" + mapper.writeValueAsString(queryParameters)+ "]" ); } catch (JsonProcessingException ee) { logger.info("Failed to generate json string for logging in function SearchServiceV2.advancedQuery:" + ee.getMessage()); } UUID networkId = UUID.fromString(networkIdStr); try (NetworkDAO dao = new NetworkDAO()) { UUID userId = getLoggedInUserId(); if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } checkIfQueryIsAllowed(networkId, dao); } 
Client client = ClientBuilder.newBuilder().build(); /*Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("depth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); */ String prefix = Configuration.getInstance().getProperty("AdvancedQueryURL"); WebTarget target = client.target(prefix + networkId + "/advancedquery"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); return Response.ok().entity(in).build(); } private static void checkIfQueryIsAllowed(UUID networkId, NetworkDAO dao) throws ForbiddenOperationException, ObjectNotFoundException, SQLException { if ( dao.getNetworkEdgeCount(networkId) > networkQuerySizeLimit) throw new ForbiddenOperationException("Query on networks that have over " + networkQuerySizeLimit + " edges is not supported in this release. " + "You can download the network and process it on your own computer. Please email [email protected] if you need further assistance."); } @POST @PermitAll @Path("/network/genes") @Produces("application/json") public NetworkSearchResult searchNetworkByGenes( final SimpleQuery geneQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws IllegalArgumentException, NdexException { accLogger.info("[data]\t[query:" +geneQuery.getSearchString() + "]" ); if ( geneQuery.getSearchString().trim().length() == 0 || geneQuery.getSearchString().trim().equals("*")) { try (NetworkDAO dao = new NetworkDAO()) { SimpleNetworkQuery finalQuery = new SimpleNetworkQuery(); finalQuery.setSearchString(geneQuery.getSearchString()); NetworkSearchResult result = dao.findNetworks(finalQuery, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (Exception e) { throw new NdexException(e.getMessage()); } } String[] query = geneQuery.getSearchString().split("(,|\\s)+(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); Set<String> processedTerms = new HashSet<>(query.length); for ( String q : query) { if ( q.startsWith("\"") && q.endsWith("\"") || q.startsWith("\'") && q.endsWith("\'") ) processedTerms.add(q.substring(1, q.length()-1)); else processedTerms.add(q); } if ( processedTerms.size() == 0) return new NetworkSearchResult(); Set<String> r = expandGeneSearchTerms(processedTerms); StringBuilder lStr = new StringBuilder (); for ( String os : processedTerms) lStr.append("\"" + os + "\" "); for ( String i : r) { if (! 
processedTerms.contains(i)) lStr.append( "\"" + i + "\" "); //else // System.out.println("term " + i + " is in query, ignoring it."); } SimpleNetworkQuery finalQuery = new SimpleNetworkQuery(); finalQuery.setSearchString(lStr.toString()); logger.info("Final search string is ("+ lStr.length()+"): " + lStr.toString()); try (NetworkDAO dao = new NetworkDAO()) { NetworkSearchResult result = dao.findNetworks(finalQuery, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (Exception e) { throw new NdexException(e.getMessage()); } } private static Set<String> expandGeneSearchTerms(Collection<String> geneSearchTerms) throws NdexException { Client client = ClientBuilder.newBuilder().build(); Set<String> expendedTerms = new HashSet<> (); MultivaluedMap<String, String> formData = new MultivaluedHashMap<>(); StringBuilder lStr = new StringBuilder (); for ( String i : geneSearchTerms) if ( i.trim().length() != 0 ) lStr.append( i + ","); if (lStr.length()<1) return expendedTerms; String q = lStr.substring(0,lStr.length()-1); formData.add("q", q); formData.add("scopes", "symbol,entrezgene,ensemblgene,alias,uniprot"); formData.add("fields", "symbol,name,taxid,entrezgene,ensembl.gene,alias,uniprot,MGI,RGD,HGNC"); formData.add("species", "9606"); formData.add("dotfield", "true"); // lStr.append("&scope=symbol,entrezgene,ensemblgene,alias,uniprot&fields=symbol,name,taxid,entrezgene,ensembl.gene,alias,uniprot&dotfield=true"); WebTarget target = client.target("http://mygene.info/v3/query"); Response response = target.request().post(Entity.form(formData)); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // Object expensionResult = response.readEntity(Object.class); // System.out.println(expensionResult); List<Map<String,Object>> expensionResult = response.readEntity(new GenericType<List<Map<String,Object>>>() {}); Set<String> missList = new HashSet<> (); for ( Map<String,Object> termObj : expensionResult) { Boolean notFound = (Boolean)termObj.get("notfound"); if ( notFound!=null && notFound.booleanValue()) { missList.add((String)termObj.get("query")); continue; } addTermsToExpensionSet(termObj.get("ensembl.gene"), expendedTerms); addTermsToExpensionSet(termObj.get("symbol"), expendedTerms); String id = (String)termObj.get("entrezgene"); if ( id !=null) expendedTerms.add(id); String term = (String) termObj.get("uniprot.Swiss-Prot"); if ( term !=null) expendedTerms.add(term); addTermsToExpensionSet (termObj.get("alias"), expendedTerms); addTermsToExpensionSet (termObj.get("uniprot.TrEMBL"), expendedTerms); addSingleTermToExpensionSet (termObj.get("entrezgene"), expendedTerms); addSingleTermToExpensionSet (termObj.get("name"), expendedTerms); String hgnc = (String)termObj.get("HGNC"); if ( hgnc !=null) expendedTerms.add("hgnc:" + hgnc); // addSingleTermToExpensionSet (termObj.get("MGI"), expendedTerms); } return expendedTerms; } private static void addTermsToExpensionSet(Object term, Set<String> expendedTerms) { if ( term !=null) { if (term instanceof String) { expendedTerms.add((String)term); } else { expendedTerms.addAll((List<String>) term); } } } private static void addSingleTermToExpensionSet(Object term, Set<String> expendedTerms) { if ( term !=null) { if (term instanceof String) { expendedTerms.add((String)term); } else { expendedTerms.add(term.toString()); } } } }
src/main/java/org/ndexbio/rest/services/SearchServiceV2.java
/** * Copyright (c) 2013, 2016, The Regents of the University of California, The Cytoscape Consortium * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ package org.ndexbio.rest.services; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.sql.SQLException; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import javax.annotation.security.PermitAll; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.DefaultValue; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Context; import javax.ws.rs.core.GenericType; import javax.ws.rs.core.MultivaluedHashMap; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import org.apache.commons.io.IOUtils; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrDocumentList; import org.ndexbio.common.models.dao.postgresql.GroupDAO; import org.ndexbio.common.models.dao.postgresql.NetworkDAO; import org.ndexbio.common.models.dao.postgresql.UserDAO; import org.ndexbio.common.persistence.CXNetworkLoader; import org.ndexbio.common.solr.SingleNetworkSolrIdxManager; import org.ndexbio.common.util.NdexUUIDFactory; import org.ndexbio.model.errorcodes.NDExError; import org.ndexbio.model.exceptions.BadRequestException; import org.ndexbio.model.exceptions.ForbiddenOperationException; import org.ndexbio.model.exceptions.NdexException; import org.ndexbio.model.exceptions.ObjectNotFoundException; 
import org.ndexbio.model.exceptions.UnauthorizedOperationException; import org.ndexbio.model.network.query.EdgeCollectionQuery; import org.ndexbio.model.object.CXSimplePathQuery; import org.ndexbio.model.object.Group; import org.ndexbio.model.object.NetworkSearchResult; import org.ndexbio.model.object.SimpleNetworkQuery; import org.ndexbio.model.object.SimpleQuery; import org.ndexbio.model.object.SolrSearchResult; import org.ndexbio.model.object.User; import org.ndexbio.model.object.network.VisibilityType; import org.ndexbio.rest.Configuration; import org.ndexbio.rest.filters.BasicAuthenticationFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @Path("/v2/search") public class SearchServiceV2 extends NdexService { // private static final String GOOGLE_OAUTH_FLAG = "USE_GOOGLE_AUTHENTICATION"; // private static final String GOOGLE_OATH_KEY = "GOOGLE_OATH_KEY"; //static Logger logger = LoggerFactory.getLogger(BatchServiceV2.class); static Logger accLogger = LoggerFactory.getLogger(BasicAuthenticationFilter.accessLoggerName); static final int networkQuerySizeLimit = 500000; /************************************************************************** * Injects the HTTP request into the base class to be used by * getLoggedInUser(). * * @param httpRequest * The HTTP request injected by RESTEasy's context. **************************************************************************/ public SearchServiceV2(@Context HttpServletRequest httpRequest) { super(httpRequest); } /************************************************************************** * Finds users based on the search parameters. * * @param searchParameters * The search parameters. * @throws Exception **************************************************************************/ @POST @PermitAll @AuthenticationNotRequired @Path("/user") @Produces("application/json") public static SolrSearchResult<User> findUsers( SimpleQuery simpleUserQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize ) throws Exception { accLogger.info("[data]\t[query:" +simpleUserQuery.getSearchString() + "]" ); try (UserDAO dao = new UserDAO ()){ final SolrSearchResult<User> users = dao.findUsers(simpleUserQuery, skipBlocks, blockSize); return users; } } /************************************************************************** * Find Groups based on search parameters - string matching for now * * @params searchParameters The search parameters. * @return Groups that match the search criteria. 
* @throws SQLException * @throws NdexException * @throws SolrServerException * @throws IOException **************************************************************************/ @POST @PermitAll @AuthenticationNotRequired @Path("/group") @Produces("application/json") public static SolrSearchResult<Group> findGroups(SimpleQuery simpleQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws SQLException, IOException, SolrServerException, NdexException { // logger.info("[start: Search group \"{}\"]", simpleQuery.getSearchString()); accLogger.info("[data]\t[query:" +simpleQuery.getSearchString() + "]" ); try (GroupDAO dao = new GroupDAO()) { final SolrSearchResult<Group> groups = dao.findGroups(simpleQuery, skipBlocks, blockSize); // logger.info("[end: Search group \"{}\"]", simpleQuery.getSearchString()); return groups; } } @POST @PermitAll @Path("/network") @Produces("application/json") public NetworkSearchResult searchNetwork( final SimpleNetworkQuery query, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws IllegalArgumentException, NdexException { accLogger.info("[data]\t[acc:"+ query.getAccountName() + "]\t[query:" +query.getSearchString() + "]" ); if(query.getAccountName() != null) query.setAccountName(query.getAccountName().toLowerCase()); try (NetworkDAO dao = new NetworkDAO()) { NetworkSearchResult result = dao.findNetworks(query, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (NdexException e1) { throw e1; } catch (Exception e) { e.printStackTrace(); throw new NdexException(e.getMessage()); } } @PermitAll @POST @Path("/network/{networkId}/nodes") @Produces("application/json") public SolrDocumentList queryNetworkNodes( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("100") @QueryParam("limit") int limit, final SimpleQuery queryParameters ) throws NdexException, SQLException, SolrServerException, IOException { accLogger.info("[data]\t[query:" + queryParameters.getSearchString() + "]" ); UUID networkId = UUID.fromString(networkIdStr); try (NetworkDAO dao = new NetworkDAO()) { UUID userId = getLoggedInUserId(); if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } checkIfQueryIsAllowed(networkId, dao); getSolrIdxReady(networkId, dao); } try (SingleNetworkSolrIdxManager solr = new SingleNetworkSolrIdxManager(networkId.toString())) { SolrDocumentList r = solr.getNodeIdsByQuery(queryParameters.getSearchString(), limit); return r; } /* Client client = ClientBuilder.newBuilder().build(); Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("depth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/query"); Response response = target.request().post(Entity.entity(queryEntity, "application/json")); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); return Response.ok().entity(in).build(); */ } 
@SuppressWarnings("resource") @PermitAll @POST @Path("/network/{networkId}/query") @Produces("application/json") public Response queryNetworkAsCX( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("false") @QueryParam("save") boolean saveAsNetwork, final CXSimplePathQuery queryParameters ) throws NdexException, SQLException, URISyntaxException, SolrServerException, IOException { accLogger.info("[data]\t[depth:"+ queryParameters.getSearchDepth() + "][query:" + queryParameters.getSearchString() + "]" ); if ( queryParameters.getSearchDepth() <1) { queryParameters.setSearchDepth(1); } UUID networkId = UUID.fromString(networkIdStr); UUID userId = getLoggedInUserId(); if ( saveAsNetwork) { if (userId == null) throw new BadRequestException("Only authenticated users can save query results."); try (UserDAO dao = new UserDAO()) { dao.checkDiskSpace(userId); } } String networkName; try (NetworkDAO dao = new NetworkDAO()) { if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } networkName = dao.getNetworkName(networkId); getSolrIdxReady(networkId, dao); } /* ProvenanceEntity ei = new ProvenanceEntity(); ei.setUri(Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ networkIdStr + "/summary" ); ei.addProperty("dc:title", networkName); */ if (networkName == null) networkName = "Neighborhood query result on unnamed network"; else networkName = "Neighborhood query result on network - " + networkName; Client client = ClientBuilder.newBuilder().build(); String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/query"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { NDExError obj = response.readEntity(NDExError.class); throw new NdexException(obj.getMessage()); } InputStream in = response.readEntity(InputStream.class); if (saveAsNetwork) { /* ProvenanceEntity entity = new ProvenanceEntity(); ProvenanceEvent evt = new ProvenanceEvent("Neighborhood query",new Timestamp(Calendar.getInstance().getTimeInMillis())); evt.addProperty("Query terms", queryParameters.getSearchString()); evt.addProperty("Query depth", String.valueOf(queryParameters.getSearchDepth())); evt.addProperty( "user name", this.getLoggedInUser().getUserName()); evt.addInput(ei); entity.setCreationEvent(evt); */ return saveQueryResult(networkName, userId, getLoggedInUser().getUserName(), in); } return Response.ok().entity(in).build(); } private static void getSolrIdxReady(UUID networkId, NetworkDAO dao) throws SQLException, ObjectNotFoundException, SolrServerException, IOException, NdexException { int nodeCount = dao.getNodeCount(networkId); try (SingleNetworkSolrIdxManager solr = new SingleNetworkSolrIdxManager(networkId.toString())) { boolean ready = solr.isReady(nodeCount < SingleNetworkSolrIdxManager.AUTOCREATE_THRESHHOLD); if ( !ready ) { if (nodeCount < SingleNetworkSolrIdxManager.AUTOCREATE_THRESHHOLD) throw new NdexException ("Failed to create Solr Index on this network."); throw new NdexException("NDEx server hasn't finished creating index on this network yet. 
Please try again later"); } } } private Response saveQueryResult(String networkName, UUID ownerUUID,String ownerName, InputStream in) throws SQLException, URISyntaxException { // create a network entry in db UUID uuid = NdexUUIDFactory.INSTANCE.createNewNDExUUID(); try (NetworkDAO dao = new NetworkDAO()) { dao.CreateEmptyNetworkEntry(uuid, ownerUUID, ownerName, 0,networkName, null); // dao.setProvenance(uuid, entity); dao.commit(); } // start the saving thread. NetworkStreamSaverThread worker = new NetworkStreamSaverThread(uuid, in); worker.start(); // return the URL as resource String urlStr = Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ uuid; URI l = new URI (urlStr); return Response.created(l).entity(l).build(); } private class NetworkStreamSaverThread extends Thread { UUID networkUUID; InputStream input; public NetworkStreamSaverThread(UUID networkId, InputStream in) { this.networkUUID = networkId; this.input = in; // this.owner = ownerName; } @Override public void run() { String pathPrefix = Configuration.getInstance().getNdexRoot() + "/data/" + networkUUID.toString(); // Create dir java.nio.file.Path dir = Paths.get(pathPrefix); Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwxrwxr-x"); FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); try { Files.createDirectory(dir, attr); // write content to file File cxFile = new File(pathPrefix + "/network.cx"); java.nio.file.Files.copy(input, cxFile.toPath(), StandardCopyOption.REPLACE_EXISTING); long fileSize = cxFile.length(); try (NetworkDAO dao = new NetworkDAO()) { dao.setNetworkFileSize(networkUUID, fileSize); dao.commit(); } catch (SQLException | NdexException e2) { e2.printStackTrace(); try (NetworkDAO dao = new NetworkDAO()) { dao.setErrorMessage(networkUUID, "Failed to set network file size: " + e2.getMessage()); dao.unlockNetwork(networkUUID); } catch (SQLException e3) { e3.printStackTrace(); } } } catch (IOException e) { e.printStackTrace(); try (NetworkDAO dao = new NetworkDAO()) { dao.setErrorMessage(networkUUID, "Failed to create network file on the server: " + e.getMessage()); dao.unlockNetwork(networkUUID); } catch (SQLException e2) { e2.printStackTrace(); } } IOUtils.closeQuietly(input); try (NetworkDAO dao = new NetworkDAO ()) { try ( CXNetworkLoader loader = new CXNetworkLoader(networkUUID, false,dao, VisibilityType.PRIVATE, null, 5000) ) { loader.persistCXNetwork(); } catch ( IOException | NdexException | SQLException | RuntimeException | SolrServerException e1) { e1.printStackTrace(); try { dao.setErrorMessage(networkUUID, e1.getMessage()); dao.setFlag(networkUUID, "readonly", false); try { dao.updateNetworkVisibility(networkUUID, VisibilityType.PRIVATE, true); } catch (NdexException e) { System.out.print("Error when updating network visibility: " + e.getMessage()); e.printStackTrace(); } dao.unlockNetwork(networkUUID); } catch (SQLException e) { System.out.println("Failed to set Error for network " + networkUUID); e.printStackTrace(); } } } catch (SQLException e) { e.printStackTrace(); } } } @PermitAll @POST @Path("/network/{networkId}/interconnectquery") @Produces("application/json") public Response interconnectQuery( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, @DefaultValue("false") @QueryParam("save") boolean saveAsNetwork, final CXSimplePathQuery queryParameters ) throws NdexException, SQLException, URISyntaxException, SolrServerException, IOException 
{ accLogger.info("[data]\t[depth:"+ queryParameters.getSearchDepth() + "][query:" + queryParameters.getSearchString() + "]" ); if ( queryParameters.getSearchDepth() <1) { queryParameters.setSearchDepth(1); } UUID networkId = UUID.fromString(networkIdStr); UUID userId = getLoggedInUserId(); if ( saveAsNetwork) { if (userId == null) throw new BadRequestException("Only authenticated users can save query results."); try (UserDAO dao = new UserDAO()) { dao.checkDiskSpace(userId); } } String networkName; try (NetworkDAO dao = new NetworkDAO()) { if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } getSolrIdxReady(networkId, dao); networkName = dao.getNetworkName(networkId); } /* ProvenanceEntity ei = new ProvenanceEntity(); ei.setUri(Configuration.getInstance().getHostURI() + Configuration.getInstance().getRestAPIPrefix()+"/network/"+ networkIdStr + "/summary" ); ei.addProperty("dc:title", networkName); */ if (networkName == null) networkName = "Interconnect query result on unnamed network"; else networkName = "Interconnect query result on network - " + networkName; Client client = ClientBuilder.newBuilder().build(); /*Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("searchDepth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); queryEntity */ String prefix = Configuration.getInstance().getProperty("NeighborhoodQueryURL"); WebTarget target = client.target(prefix + networkId + "/interconnectquery"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { NDExError obj = response.readEntity(NDExError.class); throw new NdexException(obj.getMessage()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); if (saveAsNetwork) { /* ProvenanceEntity entity = new ProvenanceEntity(); ProvenanceEvent evt = new ProvenanceEvent("Interconnect query",new Timestamp(Calendar.getInstance().getTimeInMillis())); evt.addProperty("Query terms", queryParameters.getSearchString()); evt.addProperty( "user name", this.getLoggedInUser().getUserName()); evt.addInput(ei); entity.setCreationEvent(evt); */ return saveQueryResult(networkName, userId, getLoggedInUser().getUserName(), in); } return Response.ok().entity(in).build(); } @PermitAll @POST @Path("/network/{networkId}/advancedquery") @Produces("application/json") public Response advancedQuery( @PathParam("networkId") final String networkIdStr, @QueryParam("accesskey") String accessKey, final EdgeCollectionQuery queryParameters ) throws NdexException, SQLException { /* if ( networkIdStr.equals("0000")) return Response.ok().build();*/ ObjectMapper mapper = new ObjectMapper(); try { accLogger.info("[data]\t[query:" + mapper.writeValueAsString(queryParameters)+ "]" ); } catch (JsonProcessingException ee) { logger.info("Failed to generate json string for logging in function SearchServiceV2.advancedQuery:" + ee.getMessage()); } UUID networkId = UUID.fromString(networkIdStr); try (NetworkDAO dao = new NetworkDAO()) { UUID userId = getLoggedInUserId(); if ( !dao.isReadable(networkId, userId) && !dao.accessKeyIsValid(networkId, accessKey)) { throw new UnauthorizedOperationException ("Unauthorized access to network " + networkId); } checkIfQueryIsAllowed(networkId, dao); } 
Client client = ClientBuilder.newBuilder().build(); /*Map<String, Object> queryEntity = new TreeMap<>(); queryEntity.put("terms", queryParameters.getSearchString()); queryEntity.put("depth", queryParameters.getSearchDepth()); queryEntity.put("edgeLimit", queryParameters.getEdgeLimit()); */ String prefix = Configuration.getInstance().getProperty("AdvancedQueryURL"); WebTarget target = client.target(prefix + networkId + "/advancedquery"); Response response = target.request().post(Entity.entity(queryParameters, "application/json")); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // String value = response.readEntity(String.class); // response.close(); InputStream in = response.readEntity(InputStream.class); return Response.ok().entity(in).build(); } private static void checkIfQueryIsAllowed(UUID networkId, NetworkDAO dao) throws ForbiddenOperationException, ObjectNotFoundException, SQLException { if ( dao.getNetworkEdgeCount(networkId) > networkQuerySizeLimit) throw new ForbiddenOperationException("Query on networks that have over " + networkQuerySizeLimit + " edges is not supported in this release. " + "You can download the network and process it on your own computer. Please email [email protected] if you need further assistance."); } @POST @PermitAll @Path("/network/genes") @Produces("application/json") public NetworkSearchResult searchNetworkByGenes( final SimpleQuery geneQuery, @DefaultValue("0") @QueryParam("start") int skipBlocks, @DefaultValue("100") @QueryParam("size") int blockSize) throws IllegalArgumentException, NdexException { accLogger.info("[data]\t[query:" +geneQuery.getSearchString() + "]" ); if ( geneQuery.getSearchString().trim().length() == 0 || geneQuery.getSearchString().trim().equals("*")) { try (NetworkDAO dao = new NetworkDAO()) { SimpleNetworkQuery finalQuery = new SimpleNetworkQuery(); finalQuery.setSearchString(geneQuery.getSearchString()); NetworkSearchResult result = dao.findNetworks(finalQuery, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (Exception e) { throw new NdexException(e.getMessage()); } } String[] query = geneQuery.getSearchString().split("(,|\\s)+(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); Set<String> processedTerms = new HashSet<>(query.length); for ( String q : query) { if ( q.startsWith("\"") && q.endsWith("\"") || q.startsWith("\'") && q.endsWith("\'") ) processedTerms.add(q.substring(1, q.length()-1)); else processedTerms.add(q); } if ( processedTerms.size() == 0) return new NetworkSearchResult(); Set<String> r = expandGeneSearchTerms(processedTerms); StringBuilder lStr = new StringBuilder (); for ( String os : processedTerms) lStr.append("\"" + os + "\" "); for ( String i : r) { if (! 
processedTerms.contains(i)) lStr.append( "\"" + i + "\" "); //else // System.out.println("term " + i + " is in query, ignoring it."); } SimpleNetworkQuery finalQuery = new SimpleNetworkQuery(); finalQuery.setSearchString(lStr.toString()); logger.info("Final search string is ("+ lStr.length()+"): " + lStr.toString()); try (NetworkDAO dao = new NetworkDAO()) { NetworkSearchResult result = dao.findNetworks(finalQuery, skipBlocks, blockSize, this.getLoggedInUser()); return result; } catch (Exception e) { throw new NdexException(e.getMessage()); } } private static Set<String> expandGeneSearchTerms(Collection<String> geneSearchTerms) throws NdexException { Client client = ClientBuilder.newBuilder().build(); Set<String> expendedTerms = new HashSet<> (); MultivaluedMap<String, String> formData = new MultivaluedHashMap<>(); StringBuilder lStr = new StringBuilder (); for ( String i : geneSearchTerms) if ( i.trim().length() != 0 ) lStr.append( i + ","); if (lStr.length()<1) return expendedTerms; String q = lStr.substring(0,lStr.length()-1); formData.add("q", q); formData.add("scopes", "symbol,entrezgene,ensemblgene,alias,uniprot"); formData.add("fields", "symbol,name,taxid,entrezgene,ensembl.gene,alias,uniprot,MGI,RGD,HGNC"); formData.add("species", "9606"); formData.add("dotfield", "true"); // lStr.append("&scope=symbol,entrezgene,ensemblgene,alias,uniprot&fields=symbol,name,taxid,entrezgene,ensembl.gene,alias,uniprot&dotfield=true"); WebTarget target = client.target("http://mygene.info/v3/query"); Response response = target.request().post(Entity.form(formData)); if ( response.getStatus()!=200) { Object obj = response.readEntity(Object.class); throw new NdexException(obj.toString()); } // Object expensionResult = response.readEntity(Object.class); // System.out.println(expensionResult); List<Map<String,Object>> expensionResult = response.readEntity(new GenericType<List<Map<String,Object>>>() {}); Set<String> missList = new HashSet<> (); for ( Map<String,Object> termObj : expensionResult) { Boolean notFound = (Boolean)termObj.get("notfound"); if ( notFound!=null && notFound.booleanValue()) { missList.add((String)termObj.get("query")); continue; } addTermsToExpensionSet(termObj.get("ensembl.gene"), expendedTerms); addTermsToExpensionSet(termObj.get("symbol"), expendedTerms); String id = (String)termObj.get("entrezgene"); if ( id !=null) expendedTerms.add(id); String term = (String) termObj.get("uniprot.Swiss-Prot"); if ( term !=null) expendedTerms.add(term); addTermsToExpensionSet (termObj.get("alias"), expendedTerms); addTermsToExpensionSet (termObj.get("uniprot.TrEMBL"), expendedTerms); addSingleTermToExpensionSet (termObj.get("entrezgene"), expendedTerms); addSingleTermToExpensionSet (termObj.get("name"), expendedTerms); String hgnc = (String)termObj.get("HGNC"); if ( hgnc !=null) expendedTerms.add("hgnc:" + hgnc); // addSingleTermToExpensionSet (termObj.get("MGI"), expendedTerms); } return expendedTerms; } private static void addTermsToExpensionSet(Object term, Set<String> expendedTerms) { if ( term !=null) { if (term instanceof String) { expendedTerms.add((String)term); } else { expendedTerms.addAll((List<String>) term); } } } private static void addSingleTermToExpensionSet(Object term, Set<String> expendedTerms) { if ( term !=null) { if (term instanceof String) { expendedTerms.add((String)term); } else { expendedTerms.add(term.toString()); } } } }
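As an aside on the searchNetworkByGenes handler in the file above: the term pre-processing (split on commas/whitespace that fall outside double quotes, then strip surrounding quotes) can be read in isolation. Below is a minimal sketch of just that step; the class and method names (GeneTermSplitSketch, preprocess) are illustrative only and not part of the NDEx code, while the regular expression is the one used in the source.

import java.util.HashSet;
import java.util.Set;

public class GeneTermSplitSketch {
    // Split on commas/whitespace outside double quotes, then strip surrounding
    // single or double quotes -- the same pre-processing searchNetworkByGenes
    // applies before expanding terms via mygene.info.
    static Set<String> preprocess(String searchString) {
        String[] parts = searchString.split("(,|\\s)+(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1);
        Set<String> terms = new HashSet<>(parts.length);
        for (String p : parts) {
            if ((p.startsWith("\"") && p.endsWith("\"")) || (p.startsWith("'") && p.endsWith("'")))
                terms.add(p.substring(1, p.length() - 1));
            else
                terms.add(p);
        }
        return terms;
    }

    public static void main(String[] args) {
        // Quoted terms stay intact through the split, then lose their quotes.
        System.out.println(preprocess("TP53, \"BRCA1\" BRCA2")); // e.g. [BRCA2, TP53, BRCA1]
    }
}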
Fix UD-2265. Network size checking is removed from node query function.
src/main/java/org/ndexbio/rest/services/SearchServiceV2.java
Fix UD-2265. Network size checking is removed from node query function.
Java
mit
be09a2d6654b28aa713b722a4f8bf2b0b86dd28f
0
sasmita/iot-devkit-samples,sasmita/iot-devkit-samples
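The neighborhood, interconnect and advanced query endpoints in the SearchServiceV2 row above all follow the same client-side pattern: POST the parsed query to a backing query service with a JAX-RS client and stream the response body back. A minimal sketch of that pattern follows; the class name, the generic Object payload, the serviceUrl parameter and the plain RuntimeException are placeholders for this sketch (the real code builds the URL from configuration and throws NdexException).

import java.io.InputStream;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;

public class QueryForwardSketch {
    // POST the query object as JSON to the backing query service and return
    // the raw response stream, mirroring the forwarding done by queryNetworkAsCX.
    public static InputStream forward(Object queryPayload, String serviceUrl) {
        Client client = ClientBuilder.newBuilder().build();
        WebTarget target = client.target(serviceUrl);
        Response response = target.request().post(Entity.entity(queryPayload, "application/json"));
        if (response.getStatus() != 200) {
            // The service code wraps this in an NdexException; a RuntimeException keeps the sketch self-contained.
            throw new RuntimeException("Query service returned HTTP " + response.getStatus());
        }
        return response.readEntity(InputStream.class);
    }
}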
/* * Author: Sisinty Sasmita Patra <[email protected]> * Author: Petre Eftime <[email protected]> * * Copyright (c) 2015 Intel Corporation. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /** * @file * @ingroup grove * @brief Various Sensors * * Demonstrate how to read a digital value from an input pin using the MRAA * library. * Suitable ones in the Grove Starter Kit are the Button and Touch Sensor. * * @hardware - Grove Temperature Sensor connected to Grove Base Shield Port A1 * - JHD1313M1 LCD connected to any I2C port on the Grove Base Shield * - Grove Button connected to the Grove Base Shield Port D2 * - Grove Rotary connected to the Grove Base Shield Port A0 * - Grove Light connected to the Grove Base Shield Port A2 * - Grove Touch Sensor connected to the Grove Base Shield Port D4 * - MMA7660 Accelerometer connected to any I2C port on the Grove Base Shield * * @req mraa.jar * @req upm_grove.jar * @req upm_i2clcd.jar * @req upm_ttp223.jar * @req upm_mma7660.jar * * @date 19/08/2015 */ import upm_grove.GroveButton; import upm_grove.GroveLight; import upm_grove.GroveRotary; import upm_grove.GroveTemp; import upm_i2clcd.Jhd1313m1; import upm_mma7660.MMA7660; import upm_ttp223.TTP223; import mraa.Platform; import mraa.mraa; public class VariousSensors { public static void main(String[] args) { // check that we are running on Galileo or Edison Platform platform = mraa.getPlatformType(); if (platform != Platform.INTEL_GALILEO_GEN1 && platform != Platform.INTEL_GALILEO_GEN2 && platform != Platform.INTEL_EDISON_FAB_C) { System.err.println("Unsupported platform, exiting"); return; } // Create the temperature sensor object using AIO pin 1 GroveTemp temp = new GroveTemp(1); // Create the lcd sensor object using I2C pin Jhd1313m1 lcd = new Jhd1313m1(0, 0x3E, 0x62); // Create the button object using GPIO pin 2 GroveButton button = new GroveButton(2); // Create the rotary Sensor object using AIO pin 0 GroveRotary knob = new GroveRotary(0); // Create the light sensor object using AIO pin 2 GroveLight light = new GroveLight(2); // Create the TTP223 touch sensor object using GPIO pin 4 TTP223 touch = new TTP223(4); // Create the MMA7660 accelerometer on I2C bus 0 MMA7660 accel = new MMA7660(0); // place device in standby mode so we can write registers accel.setModeStandby(); // enable 64 samples per second accel.setSampleRate(MMA7660.MMA7660_AUTOSLEEP_T.AUTOSLEEP_64); // place device into active mode accel.setModeActive(); int celsius; float rotary; int lit; boolean tuch; float acc[]; // This for loop is 
required to get the updated value from respective sensors for (int i=0; i < 5; i++) { celsius = temp.value(); rotary = knob.abs_deg(); lit = light.value(); tuch = touch.isPressed(); acc = accel.getAcceleration(); } lcd.setCursor(0,0); lcd.write("welcome to "); lcd.setCursor(1,0); lcd.write("Starter Kit!"); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); /* This variable helps to display one particular sensor at any given * time, when button value is '1' */ int x = 0; lcd.setCursor(0,0); lcd.write("Press Button "); /* This while loop continously checks for button value. * if button value is '1', the sensors values are displayed depending * on x value */ while(true) { if(button.value() != 0) { if(x == 0) { celsius = temp.value(); // Since LCD displays data in string format, // we need to convert (celsius value) from integer to string String tdata = Integer.toString(celsius); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Temperature in "); lcd.setCursor(1,2); lcd.write("celsius: "); lcd.write(tdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; lcd.write("Press Button "); } else if(x == 1) { rotary = knob.abs_deg(); String rotdata = Float.toString(rotary); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Rotatory Angle "); lcd.setCursor(1,2); lcd.write("in degree: "); lcd.write(rotdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; lcd.write("Press Button "); } else if(x == 2) { lit = light.value(); String litdata = Integer.toString(lit); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("light value "); lcd.setCursor(1,2); lcd.write("in lux: "); lcd.write(litdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; lcd.write("Press Button "); } else if(x == 3) { tuch = touch.isPressed(); String touchdata = Boolean.toString(tuch); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Touch Sensor "); lcd.setCursor(1,2); lcd.write("is pressed: "); lcd.write(touchdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; lcd.write("Press Button "); } else if(x == 4) { acc = accel.getAcceleration(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration x: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[0])); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration y: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[1])); try { 
Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration z: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[2])); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x = 0; lcd.write("Press Button "); } } } } }
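The VariousSensors sample above repeats the same try/catch around Thread.sleep for every LCD screen. A small helper along the following lines would factor that out; SleepUtil is a hypothetical name, not part of the sample, and unlike the sample it also restores the interrupt flag rather than swallowing it.

public class SleepUtil {
    // Sleep for the given number of milliseconds; log and restore the
    // interrupt flag instead of discarding the InterruptedException.
    public static void sleepMs(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            System.err.println("Sleep interrupted: " + e);
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) {
        sleepMs(1000); // stands in for one of the repeated try/catch blocks in VariousSensors
    }
}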
kits/starter/java/VariousSensors.java
/* * Author: Sisinty Sasmita Patra <[email protected]> * Author: Petre Eftime <[email protected]> * * Copyright (c) 2015 Intel Corporation. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /** * @file * @ingroup grove * @brief Various Sensors * * Demonstrate how to read a digital value from an input pin using the MRAA * library. * Suitable ones in the Grove Starter Kit are the Button and Touch Sensor. * * @hardware - Grove Temperature Sensor connected to Grove Base Shield Port A1 * - JHD1313M1 LCD connected to any I2C port on the Grove Base Shield * - Grove Button connected to the Grove Base Shield Port D2 * - Grove Rotary connected to the Grove Base Shield Port A0 * - Grove Light connected to the Grove Base Shield Port A2 * - Grove Touch Sensor connected to the Grove Base Shield Port D4 * - MMA7660 Accelerometer connected to any I2C port on the Grove Base Shield * * @req mraa.jar * @req upm_grove.jar * @req upm_i2clcd.jar * @req upm_ttp223.jar * @req upm_mma7660.jar * * @date 19/08/2015 */ import upm_grove.GroveButton; import upm_grove.GroveLight; import upm_grove.GroveRotary; import upm_grove.GroveTemp; import upm_i2clcd.Jhd1313m1; import upm_mma7660.MMA7660; import upm_ttp223.TTP223; import mraa.Platform; import mraa.mraa; public class VariousSensors { public static void main(String[] args) { // check that we are running on Galileo or Edison Platform platform = mraa.getPlatformType(); if (platform != Platform.INTEL_GALILEO_GEN1 && platform != Platform.INTEL_GALILEO_GEN2 && platform != Platform.INTEL_EDISON_FAB_C) { System.err.println("Unsupported platform, exiting"); return; } // Create the temperature sensor object using AIO pin 1 GroveTemp temp = new GroveTemp(1); // Create the lcd sensor object using I2C pin Jhd1313m1 lcd = new Jhd1313m1(0, 0x3E, 0x62); // Create the button object using GPIO pin 2 GroveButton button = new GroveButton(2); // Create the rotary Sensor object using AIO pin 0 GroveRotary knob = new GroveRotary(0); // Create the light sensor object using AIO pin 2 GroveLight light = new GroveLight(2); // Create the TTP223 touch sensor object using GPIO pin 4 TTP223 touch = new TTP223(4); // Create the MMA7660 accelerometer on I2C bus 0 MMA7660 accel = new MMA7660(0); // place device in standby mode so we can write registers accel.setModeStandby(); // enable 64 samples per second accel.setSampleRate(MMA7660.MMA7660_AUTOSLEEP_T.AUTOSLEEP_64); // place device into active mode accel.setModeActive(); int celsius; float rotary; int lit; boolean tuch; float acc[]; // This for loop is 
required to get the updated value from respective sensors for (int i=0; i < 5; i++) { celsius = temp.value(); rotary = knob.abs_deg(); lit = light.value(); tuch = touch.isPressed(); acc = accel.getAcceleration(); } lcd.setCursor(0,0); lcd.write("welcome to "); lcd.setCursor(1,0); lcd.write("Starter Kit!"); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); /* This variable helps to display one particular sensor at any given * time, when button value is '1' */ int x = 0; /* This while loop continously checks for button value. * if button value is '0', the LCD displays "press button". * if button value is '1', the sensors values are displayed depending * on x value */ while(true) { if(button.value() == 0) { lcd.setCursor(0,0); lcd.write("Press Button "); } else { if(x == 0) { celsius = temp.value(); // Since LCD displays data in string format, // we need to convert (celsius value) from integer to string String tdata = Integer.toString(celsius); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Temperature in "); lcd.setCursor(1,2); lcd.write("celsius: "); lcd.write(tdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; } else if(x == 1) { rotary = knob.abs_deg(); String rotdata = Float.toString(rotary); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Rotatory Angle "); lcd.setCursor(1,2); lcd.write("in degree: "); lcd.write(rotdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; } else if(x == 2) { lit = light.value(); String litdata = Integer.toString(lit); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("light value "); lcd.setCursor(1,2); lcd.write("in lux: "); lcd.write(litdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; } else if(x == 3) { tuch = touch.isPressed(); String touchdata = Boolean.toString(tuch); lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Touch Sensor "); lcd.setCursor(1,2); lcd.write("is pressed: "); lcd.write(touchdata); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x += 1; } else if(x == 4) { acc = accel.getAcceleration(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration x: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[0])); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration y: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[1])); try { Thread.sleep(1000); } catch (InterruptedException e) { 
System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); try { Thread.sleep(1000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.setCursor(0,0); lcd.write("Acceleration z: "); lcd.setCursor(1,2); lcd.write(Float.toString(acc[2])); try { Thread.sleep(3000); } catch (InterruptedException e) { System.err.println("Sleep interrupted: " + e.toString()); } lcd.clear(); x = 0; } } } } }
starter: modified starter sample and fixed the bug
Signed-off-by: Sisinty Sasmita Patra <[email protected]>
kits/starter/java/VariousSensors.java
starter: modified starter sample and fixed the bug
Java
mit
d4e86451139781abb87a1b2f93c2b0885435cab6
0
josephmjoy/robotics,josephmjoy/robotics
// // Structured logging classes. For logging and tracing that is intended to be consumed by other // programs that analyze and visualize the log data. // Created by Joseph M. Joy (https://github.com/josephmjoy) // package com.rinworks.robotutils; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.regex.Pattern; public class StructuredLogger { final static int PRI0 = 0; // Tag indicating pri0 final static int PRI1 = 2; // Tag indicating pri1 final static int PRI2 = 2; // Tag indicating pri2 final static String INFO = "INFO"; final static String TRACE = "TRACE"; final static String ERR = "ERR"; final static String WARN = "WARN"; private final RawLogger[] rawLoggers; private final String rootName; private final LogImplementation defaultLog; private String sessionId; private long sessionStart; private boolean sessionStarted = false; private boolean sessionEnded = false; private AtomicInteger seqNo = new AtomicInteger(0); private Consumer<String> assertionFailureHandler = null; // These are for scrubbing message type and message fields before logging. private static final Pattern BAD_NAME_PATTERN = Pattern.compile("[^-.\\w]"); private static final Pattern BAD_MSG_PATTERN = Pattern.compile("\\n\\r"); // Clients provide this to actually write log messages to some system like the file system // or network. public interface RawLogger { // Prepare to log a new session. For example, a file based logging system may // open a new file. SessionId will not contain illegal characters for file names, such as // slashes. This method will be called only once - when the owning structured logging // object's beginSession method is called. void beginSession(String sessionId); // Log a string message void log(int pri, String cat, String msg); // Flush the log to persistent storage if appropriate. void flush(); // Close any open resources (no further calls to log or flush will follow the call to close) void close(); } // The core logger interface - multiple log objects can be built - for // each component/sub-component or even transient logging tasks. public interface Log { // These are reserved key names - don't use them for generating key-value // pairs: final String SESSION_ID = "_sid"; // Session ID final String SEQ_NO = "_sn"; // Sequence number final String TIMESTAMP = "_ts"; // Timestamp - milliseconds since session started. final String COMPONENT = "_co"; final String PRI = "_pri"; // Priority: 0/1/2 final String CAT = "_cat"; // CATEGORY: ERR/WARN/INFO final String TYPE = "_ty"; // Message type final String RELATIVE_TIMESTAMP = "_rts"; // Optional relative timestamp - see Log.beginRTS final String DEF_MSG = "_msg"; // Default key for message contents // // Reserved Messages Types - reserved for messages generated by the logging system itself. // These all begin with an underscore. 
// final String LOG_SESSION_START = "_LOG_SESSION_STARTED"; // Reserved for messages generated by logging system itself final String LOG_SESSION_END = "_LOG_SESSION_ENDED"; final String LOG_TRACING_PAUSED = "_LOG_TRACING_PAUSED"; final String LOG_TRACING_RESUMED = "_LOG_TRACING_RESUMED"; final String ASSERTFAIL = "_ASSERTION_FAILURE"; // Message generated by loggedAssert final String OTHER = "_OTHER"; // Unspecified user message type // Recommended message types. These are not used by the logging system itself, but they are // RECOMMENDED to be used to log common events. // Log the initialization/deinitialization of a component final String INIT_START = "INIT_START"; final String INIT_END = "INIT_END"; final String DEINIT_START = "DEINIT_START"; final String DEINIT_END = "DEINIT_END"; // // Actual logging methods - logged with message type "_OTHER" // void err(String s); // Log an error - Pri 0 void warn(String s); // Log a warning - Pri 0 void info(String s); // Log some information - Pri 1 // The same logging methods, with a user-suppled message type. void err(String msgType, String s); void warn(String msgType, String s); void info(String msgType, String s); // Traces are like the earlier logging methods, except they can be dynamically // enabled or disabled using the pauseTracing/resumeTracing methods. void trace(String s); // Log potentially high-volume trace data - Pri 2 void trace(String msgType, String s); // As above, with a user-defined message data type // FUTURE: Concept of an optional 'location specifier' integer that is set to a random integer that is with // very high priority unique across the source code - to be able to quickly identify the source code where the log method // was invoked. In Python it could be a named parameter, ls=0 so we don't cause an explosion in the number // of logging statements. // If {cond} is false log an error, flush the log. If there is an assertion // failure handler associated with the structured logger, the handler is called. // The handler may be set by calling setAssertionFailureHandler. // void loggedAssert(boolean cond, String s); // Tracing is enabled by default, but may be paused/resumed // dynamically - useful for selectively tracing extremely verbose // data. This applies only to this log instance. void pauseTracing(); // stop logging void resumeTracing();// (re)start logging // Starts adding a relative time stamp. Subsequent logging will include a _RTS key whose value is the // time in milliseconds that has elapsed since this call was invoked. This applies only to this // log instance. void startRTS(); // Stops adding the relative stamps for this log instance. void stopRTS(); // The following methods adds key (the 'tag') that gets inserted into // every log message made from this particular log instance. // Tags must be composed entirely of non-whitespace characters and can do not // include the ':' (colon) character. // [FUTURE: Special 'moustache' tags like would get dynamic values, like {TID} would set TID=<thread ID>] void addTag(String tag); // A tag with an empty value. Can represent boolean conditions. void addTag(String tag, String value); // A tag with the specified value (which could be an empty string). // Removes a previously added tag. Attempting to remove a null, empty or nonexistant tag is silently ignored. void removeTag(String tag); // Flush the ENTIRE log, not just this sub-component. void flush(); // Creates a new log object. 
This is equivalent to calling the root // StructureLoggerObject's newLog method - there is no special relationship between // the current instance and the newly created logs. A hierarchical relationship can be established // by following a suitable naming convention such as dotted-namespace notation. Log newLog(String name); // FUTURE // Hidden tags whose existence can be checked (or rather asserted to be present or absent) at a future time. // Perhaps these could be added as named parameters in Python, otherwise additional methods to add, assert and remove tags.) } // Creates the containing logger object. This object can be used to // create the hierarchy of Logger objects. (Start by calling beginSession and // then getRootLog). // // {_assertionFailureHandler} is an optional handler of assertion failures - it is // called if the call to loggedAssert fails the assertion tes (failure has already been logged and flush() called)). // One implementation is to simply call assert(false) after an error message to debug putput. Another is to throw an // exception. WARNING: Will be invoked even if there is no active session. public StructuredLogger(RawLogger _rawLogger, String _rootName) { this(new RawLogger[] {_rawLogger}, _rootName); } // This version takes an array of rawLoggers so that logging output may be piped to multiple logger // sinks. public StructuredLogger(RawLogger[] _rawLoggers, String _rootName) { this.rawLoggers = _rawLoggers; this.rootName = _rootName; this.defaultLog = this.commonNewLog(_rootName); } // Consolidates calls to create a new log objects, incase we want to do something more // like keep a list of logs. At present we don't keep a global list of allocated log objects. private LogImplementation commonNewLog(String name) { return new LogImplementation(name); } // Updates the assertion failure handler. // The default handler is null, which means that assertion failures are logged but // otherwise no action is taken. // Note that there is no thread synchronization in this update - so it's best // to set this up before calling beginLogging. public void setAsseretionFailureHandler(Consumer<String> _assertionFailureHandler) { this.assertionFailureHandler = _assertionFailureHandler; } // Begins the logging session. The Session timestamp is set. // Caller must ensure no other thread attempts to log concurrently with // this call - actual logging calls are not synchronized for performance // reasons. public synchronized void beginLogging() { assert(!this.sessionStarted && !this.sessionEnded); long startTime = System.currentTimeMillis(); String sessionID = "" + startTime; // WAS String.format("%020d", startTime); this.sessionId = sessionID; this.sessionStart = startTime; this.sessionStarted = true; seqNo.set(0); // First logged sequence number in the session is 1. for (RawLogger rl: rawLoggers) { rl.beginSession(sessionId); } defaultLog.pri0(Log.LOG_SESSION_START, this.rootName); } // Caller must ensure no other thread attempts to log concurrently // with this thread - actual logging calls are not synchronized for // performance reasons. // WARNING - the StructuredLogger can only do a single session in its lifetime. // Once the session has been ended a new session can not be started. 
public synchronized void endLogging() { assert(this.sessionStarted); defaultLog.pri0(Log.LOG_SESSION_END, rootName); this.sessionStarted = false; this.sessionEnded = true; for (RawLogger rl: rawLoggers) { rl.flush(); rl.close(); } } // The base logger support some simple logging functions for // convenience. Look at the Log interface methods for full documentation void err(String s) { this.defaultLog.err(s); } void warn(String s) { this.defaultLog.warn(s); } void info(String s) { this.defaultLog.info(s); } // Get the root ("top level") log object, which provides a much richer // set of logging methods public Log defaultLog() { return this.defaultLog; } // This private class implements a Log object private class LogImplementation implements Log { final String component; boolean tracingEnabled = true; // {component} should be a short - 3-5 char - representation of the component. // The component hierarchy is represented using dotted notation, e.g.: root.a.b.c LogImplementation(String component) { this.component = scrubName(component); // Replace ':' etc (these shouldn't be there) by '#' } // See the Logger interface definition for documentation on // these overridden methods. @Override public LogImplementation newLog(String component) { // Note: commonNewLog is actually a method of the *containing* // class - an instance of StructuredLogger. return commonNewLog(rootName + "." + component); } @Override public void trace(String msgType, String s) { if (tracingEnabled) { rawLog(PRI2, TRACE, scrubName(msgType), s); } } @Override public void err(String s) { err(OTHER, s); } @Override public void warn(String s) { warn(OTHER, s); } @Override public void info(String s) { info(OTHER, s); } @Override public void trace(String s) { trace(OTHER, s); } @Override public void pauseTracing() { pri0(LOG_TRACING_PAUSED, ""); tracingEnabled = false; } @Override public void resumeTracing() { pri0(LOG_TRACING_RESUMED, ""); tracingEnabled = true; } @Override public void loggedAssert(boolean cond, String s) { // Note that we will call the assertionFailureHandler even if there is the logging session is not active. // However if there is no session, there will be no logging and flushing (those methods below will have no effect). if (!cond) { rawLog(PRI0, ERR, ASSERTFAIL, s); this.flush(); if (assertionFailureHandler!=null) { assertionFailureHandler.accept(s); } } } @Override public void flush() { if (sessionStarted) { for (RawLogger rl: rawLoggers) { rl.flush(); } } } // Not for use outside the containing class. void pri0(String msgType, String s) { rawLog(PRI0, INFO, msgType, s); } private void rawLog(int pri, String cat, String msgType, String msg) { // Example: // _sid:989, _sn:1, _ts: 120, _co: .b, _pri:1, _sev:INFO, _ty:OTHER, Hello world! // Note that sessionStarted is defined in the containing class - StructuredLogger! if (!sessionStarted) { return; // ******************** EARLY RETURN ****************** } msgType = scrubName(msgType); msg = scrubMessage(msg); // As a special case, if msg contains no colons, we prefix a special _msg key. 
if (msg.indexOf(StructuredMessageMapper.COLON)==-1) { msg = DEF_MSG + StructuredMessageMapper.COLON + msg; } int curSeq = seqNo.incrementAndGet(); long timestamp = System.currentTimeMillis() - sessionStart; String output = String.format("%s:%s %s:%s %s:%s %s:%s %s:%s %s:%s %s:%s %s", Log.SESSION_ID, sessionId, Log.SEQ_NO, curSeq, Log.TIMESTAMP, timestamp, Log.COMPONENT, component, Log.PRI, pri, Log.CAT, cat, Log.TYPE, msgType, msg ); if (sessionStarted) { for (RawLogger rl: rawLoggers) { rl.log(pri, cat, output); } } } @Override public void err(String msgType, String s) { // TODO Auto-generated method stub } @Override public void warn(String msgType, String s) { // TODO Auto-generated method stub } @Override public void info(String msgType, String s) { // TODO Auto-generated method stub } @Override public void startRTS() { // TODO Auto-generated method stub } @Override public void stopRTS() { // TODO Auto-generated method stub } @Override public void addTag(String tag) { // TODO Auto-generated method stub } @Override public void addTag(String tag, String value) { // TODO Auto-generated method stub } @Override public void removeTag(String tag) { // TODO Auto-generated method stub } } // Replace invalid chars by a '#' private static String scrubName(String msgType) { // Presumably this is faster than using a Regex? Not sure. return BAD_NAME_PATTERN.matcher(msgType).replaceAll("#"); } // Replace invalid chars by a '#' private static String scrubMessage(String msgType) { return BAD_MSG_PATTERN.matcher(msgType).replaceAll("#"); } private static class FileRawLogger implements RawLogger { final boolean perSessionLog; final boolean append; final File logDirectory; File logFile; final String prefix; final String suffix; BufferedWriter out; boolean logErrorNotified; // we generate on err msg on write error. // Logger that creates per-session log files public FileRawLogger(File _logDirectory, String _prefix, String _suffix, boolean _append) { perSessionLog = true; logDirectory = _logDirectory; prefix = _prefix; suffix = _suffix; append = _append; // We don't throw any exceptions on error, just write the error to the err console. if (!logDirectory.canWrite()) { System.err.println(String.format( "FileRawLogger: log directory {%s} cannot be written to.", logDirectory.getAbsolutePath() )); } } // Logger that logs to a single log file public FileRawLogger(File _logFile, boolean _append) { perSessionLog = false; logDirectory = null; logFile = _logFile; prefix = null; suffix = null; append = _append; } @Override public void beginSession(String sessionId) { if (perSessionLog) { String name = prefix + sessionId + suffix; logFile = new File(logDirectory, name); } try { out = new BufferedWriter(new FileWriter(logFile, append)); } catch (IOException e) { System.err.println(String.format( "FileRawLogger: Cannot log. Could not create/open log file {%s}. Exception: %s", logDirectory.getAbsolutePath(), e)); out = null; } } @Override public void log(int pri, String cat, String msg) { try { if (out !=null ) { out.write(msg, 0, msg.length()); out.newLine(); } } catch (IOException e) { if (!logErrorNotified) { System.err.println(String.format("FileRawLogger: could not write to log file {%s}. Exception: %s", logFile.getAbsolutePath(), e)); logErrorNotified = true; } } } @Override public void flush() { try { if (out != null) { out.flush(); } } catch (IOException e) { System.err.println(String.format("FileRawLogger: could not flush log file {%s}. 
Exception: %s", logFile.getAbsolutePath(), e)); } } @Override public void close() { try { if (out != null) { out.close(); } } catch (IOException e) { System.err.println(String.format("FileRawLogger: could not close log file {%s}. Exception: %s", logFile.getAbsolutePath(), e)); } } } private static class UDPRawLogger implements RawLogger { final String destAddress; final int destPort; boolean logErrorNotified; // we generate one err msg if there is an error message on write.. DatagramSocket clientSocket; InetAddress destIPAddress; boolean canLog = false; // Logger that logs by sending UDP traffic to the specified address and port. public UDPRawLogger(String _address, int _port) { destAddress = _address; destPort = _port; } @Override public void beginSession(String sessionId) { try { clientSocket = new DatagramSocket(); destIPAddress = InetAddress.getByName(destAddress); } catch (SocketException e) { System.err.println("UDPRawLogger: Cannot log. Could not create DatagramSocket. Exception: " + e); } catch (UnknownHostException e) { System.err.println("UDPRawLogger: Cannot log. Could not resolve address " + destAddress + ". Exception: " + e); } System.out.println(String.format("UDPRawLogger: logging session %s to IP Address %s, port %d", sessionId, destIPAddress, destPort)); canLog = true; } @Override public void log(int pri, String cat, String msg) { try { if (canLog) { byte[] sendData = msg.getBytes(); DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, destIPAddress, destPort); clientSocket.send(sendPacket); } } catch (IOException e) { if (!logErrorNotified) { System.err.println(String.format("UDPRawLogger: could not send msg to IP Address %s, port %d. Exception: %s", destIPAddress.toString(), destPort, e)); logErrorNotified = true; } } } @Override public void flush() { // Nothing to do as we don't buffer messages. } @Override public void close() { if (clientSocket != null) { clientSocket.close(); clientSocket = null; } } } /** * Creates a logger that generates per-session log files of the form {perfix}{session id}{suffix}. * No IOExceptions are thrown. Instead error messages are written to System.err. * @param logDirectory - directory where log files will reside. * @param prefix - filename prefix * @param suffix - filename suffix * @param append - true: append to the file if it exist; false: overwrite the file if it exists. * @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createFileLogger(File logDirectory, String prefix, String suffix, boolean append) { FileRawLogger fileLogger = new FileRawLogger(logDirectory, prefix, suffix, append); return fileLogger; } /** * Creates a logger that logs multiple sessions to a single file. * No IOExceptions are thrown. Instead error messages are written to System.err. * @param logFile - File object representing log file path. * @param append - true: append to the file if it exist; false: overwrite the file if it exists. * @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createFileLogger(File logFile, boolean append) { FileRawLogger fileLogger = new FileRawLogger(logFile, append); return fileLogger; } /** * Creates a logger that transmits log messages as UDP packets to the specified destination. * @param address - Destination host name or IP Address * @param port - Destination port. 
* @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createUDPLogger(String address, int port) { UDPRawLogger fileLogger = new UDPRawLogger(address, port); return fileLogger; } }
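For orientation, here is a hedged usage sketch of the StructuredLogger API shown above, using only the public members visible in this version (createFileLogger, the single-RawLogger constructor, beginLogging, defaultLog, endLogging). The log directory, file prefix and root name are placeholders; also note that in this snapshot the err/warn/info overloads that take a message type are still TODO stubs, so some calls compile but are effectively no-ops.

import java.io.File;
import com.rinworks.robotutils.StructuredLogger;

public class StructuredLoggerUsageSketch {
    public static void main(String[] args) {
        // Per-session log files named robot_<sessionId>.log under /tmp/logs (placeholder path).
        StructuredLogger.RawLogger fileLogger =
                StructuredLogger.createFileLogger(new File("/tmp/logs"), "robot_", ".log", false);
        StructuredLogger logger = new StructuredLogger(fileLogger, "ROOT");

        logger.beginLogging();                       // starts the session, logs _LOG_SESSION_STARTED
        StructuredLogger.Log log = logger.defaultLog();
        log.trace("high-volume trace data");         // pri 2; can be paused via pauseTracing()
        log.info("sensor init complete");            // delegates to a msgType overload that is still a TODO stub here
        logger.endLogging();                         // logs _LOG_SESSION_ENDED, then flushes and closes the raw loggers
    }
}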
java_robotutils/src/com/rinworks/robotutils/StructuredLogger.java
// // Structured logging classes. For logging and tracing that is intended to be consumed by other // programs that analyze and visualize the log data. // Created by Joseph M. Joy (https://github.com/josephmjoy) // package com.rinworks.robotutils; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.regex.Pattern; public class StructuredLogger { final static int PRI0 = 0; // Tag indicating pri0 final static int PRI1 = 2; // Tag indicating pri1 final static int PRI2 = 2; // Tag indicating pri2 final static String INFO = "INFO"; final static String TRACE = "TRACE"; final static String ERR = "ERR"; final static String WARN = "WARN"; private final RawLogger[] rawLoggers; private final String rootName; private final LogImplementation defaultLog; private String sessionId; private long sessionStart; private boolean sessionStarted = false; private boolean sessionEnded = false; private AtomicInteger seqNo = new AtomicInteger(0); private Consumer<String> assertionFailureHandler = null; // These are for scrubbing message type and message fields before logging. private static final Pattern BAD_NAME_PATTERN = Pattern.compile("[^-.\\w]"); private static final Pattern BAD_MSG_PATTERN = Pattern.compile("\\n\\r"); // Clients provide this to actually write log messages to some system like the file system // or network. public interface RawLogger { // Prepare to log a new session. For example, a file based logging system may // open a new file. SessionId will not contain illegal characters for file names, such as // slashes. This method will be called only once - when the owning structured logging // object's beginSession method is called. void beginSession(String sessionId); // Log a string message void log(int pri, String cat, String msg); // Flush the log to persistent storage if appropriate. void flush(); // Close any open resources (no further calls to log or flush will follow the call to close) void close(); } // The core logger interface - multiple log objects can be built - for // each component/sub-component or even transient logging tasks. public interface Log { // These are reserved key names - don't use them for generating key-value // pairs: final String SESSION_ID = "_sid"; // Session ID final String SEQ_NO = "_sn"; // Sequence number final String TIMESTAMP = "_ts"; // Timestamp - milliseconds since session started. final String COMPONENT = "_co"; final String PRI = "_pri"; // Priority: 0/1/2 final String CAT = "_cat"; // CATEGORY: ERR/WARN/INFO final String TYPE = "_ty"; // Message type final String RELATIVE_TIMESTAMP = "_rts"; // Optional relative timestamp - see Log.beginRTS final String DEF_MSG = "_msg"; // Default key for message contents // // Reserved Messages Types - reserved for messages generated by the logging system itself. // These all begin with an underscore. 
// final String LOG_SESSION_START = "_LOG_SESSION_STARTED"; // Reserved for messages generated by logging system itself final String LOG_SESSION_END = "_LOG_SESSION_ENDED"; final String LOG_TRACING_PAUSED = "_LOG_TRACING_PAUSED"; final String LOG_TRACING_RESUMED = "_LOG_TRACING_RESUMED"; final String ASSERTFAIL = "_ASSERTION_FAILURE"; // Message generated by loggedAssert final String OTHER = "_OTHER"; // Unspecified user message type // Recommended message types. These are not used by the logging system itself, but they are // RECOMMENDED to be used to log common events. // Log the initialization/deinitialization of a component final String INIT_START = "INIT_START"; final String INIT_END = "INIT_END"; final String DEINIT_START = "DEINIT_START"; final String DEINIT_END = "DEINIT_END"; // // Actual logging methods - logged with message type "_OTHER" // void err(String s); // Log an error - Pri 0 void warn(String s); // Log a warning - Pri 0 void info(String s); // Log some information - Pri 1 // The same logging methods, with a user-suppled message type. void err(String msgType, String s); void warn(String msgType, String s); void info(String msgType, String s); // Traces are like the earlier logging methods, except they can be dynamically // enabled or disabled using the pauseTracing/resumeTracing methods. void trace(String s); // Log potentially high-volume trace data - Pri 2 void trace(String msgType, String s); // As above, with a user-defined message data type // FUTURE: Concept of an optional 'location specifier' integer that is set to a random integer that is with // very high priority unique across the source code - to be able to quickly identify the source code where the log method // was invoked. In Python it could be a named parameter, ls=0 so we don't cause an explosion in the number // of logging statements. // If {cond} is false log an error, flush the log. If there is an assertion // failure handler associated with the structured logger, the handler is called. // The handler may be set by calling setAssertionFailureHandler. // void loggedAssert(boolean cond, String s); // Tracing is enabled by default, but may be paused/resumed // dynamically - useful for selectively tracing extremely verbose // data. This applies only to this log instance. void pauseTracing(); // stop logging void resumeTracing();// (re)start logging // Starts adding a relative time stamp. Subsequent logging will include a _RTS key whose value is the // time in milliseconds that has elapsed since this call was invoked. This applies only to this // log instance. void startRTS(); // Stops adding the relative stamps for this log instance. void stopRTS(); // The following methods adds key (the 'tag') that gets inserted into // every log message made from this particular log instance. // Tags must be composed entirely of non-whitespace characters and can do not // include the ':' (colon) character. // [FUTURE: Special 'moustache' tags like would get dynamic values, like {TID} would set TID=<thread ID>] void addTag(String tag); // A tag with an empty value. Can represent boolean conditions. void addTag(String tag, String value); // A tag with the specified value (which could be an empty string). // Removes a previously added tag. Attempting to remove a null, empty or nonexistant tag is silently ignored. void removeTag(String tag); // Flush the ENTIRE log, not just this sub-component. void flush(); // Creates a new log object. 
This is equivalent to calling the root // StructureLoggerObject's newLog method - there is no special relationship between // the current instance and the newly created logs. A hierarchical relationship can be established // by following a suitable naming convention such as dotted-namespace notation. Log newLog(String name); // FUTURE // Hidden tags whose existence can be checked (or rather asserted to be present or absent) at a future time. // Perhaps these could be added as named parameters in Python, otherwise additional methods to add, assert and remove tags.) } // Creates the containing logger object. This object can be used to // create the hierarchy of Logger objects. (Start by calling beginSession and // then getRootLog). // // {_assertionFailureHandler} is an optional handler of assertion failures - it is // called if the call to loggedAssert fails the assertion tes (failure has already been logged and flush() called)). // One implementation is to simply call assert(false) after an error message to debug putput. Another is to throw an // exception. WARNING: Will be invoked even if there is no active session. public StructuredLogger(RawLogger _rawLogger, String _rootName) { this(new RawLogger[] {_rawLogger}, _rootName); } // This version takes an array of rawLoggers so that logging output may be piped to multiple logger // sinks. public StructuredLogger(RawLogger[] _rawLoggers, String _rootName) { this.rawLoggers = _rawLoggers; this.rootName = _rootName; this.defaultLog = new LogImplementation(_rootName); } // Updates the assertion failure handler. // The default handler is null, which means that assertion failures are logged but // otherwise no action is taken. public void setAsseretionFailureHandler(Consumer<String> _assertionFailureHandler) { this.assertionFailureHandler = _assertionFailureHandler; } // Get the root ("top level") log object. public Log defaultLog() { return this.defaultLog; } // Begins the logging session. The Session timestamp is set. // Caller must ensure no other thread attempts to log concurrently with // this call - actual logging calls are not synchronized for performance // reasons. public synchronized void beginLogging() { assert(!this.sessionStarted && !this.sessionEnded); long startTime = System.currentTimeMillis(); String sessionID = "" + startTime; // WAS String.format("%020d", startTime); this.sessionId = sessionID; this.sessionStart = startTime; this.sessionStarted = true; seqNo.set(0); // First logged sequence number in the session is 1. for (RawLogger rl: rawLoggers) { rl.beginSession(sessionId); } defaultLog.pri0(Log.LOG_SESSION_START, this.rootName); } // Caller must ensure no other thread attempts to log concurrently // with this thread - actual logging calls are not synchronized for // performance reasons. // WARNING - the StructuredLogger can only do a single session in its lifetime. // Once the session has been ended a new session can not be started. public synchronized void endLogging() { assert(this.sessionStarted); defaultLog.pri0(Log.LOG_SESSION_END, rootName); this.sessionStarted = false; this.sessionEnded = true; for (RawLogger rl: rawLoggers) { rl.flush(); rl.close(); } } // This private class implements a Log object private class LogImplementation implements Log { final String component; boolean tracingEnabled = true; // {component} should be a short - 3-5 char - representation of the component. 
// The component hierarchy is represented using dotted notation, e.g.: root.a.b.c LogImplementation(String component) { this.component = scrubName(component); // Replace ':' etc (these shouldn't be there) by '#' } // See the Logger interface definition for documentation on // these overridden methods. @Override public LogImplementation newLog(String component) { return new LogImplementation(rootName + "." + component); } @Override public void trace(String msgType, String s) { if (tracingEnabled) { rawLog(PRI2, TRACE, scrubName(msgType), s); } } @Override public void err(String s) { err(OTHER, s); } @Override public void warn(String s) { warn(OTHER, s); } @Override public void info(String s) { info(OTHER, s); } @Override public void trace(String s) { trace(OTHER, s); } @Override public void pauseTracing() { pri0(LOG_TRACING_PAUSED, ""); tracingEnabled = false; } @Override public void resumeTracing() { pri0(LOG_TRACING_RESUMED, ""); tracingEnabled = true; } @Override public void loggedAssert(boolean cond, String s) { // Note that we will call the assertionFailureHandler even if there is the logging session is not active. // However if there is no session, there will be no logging and flushing (those methods below will have no effect). if (!cond) { rawLog(PRI0, ERR, ASSERTFAIL, s); this.flush(); if (assertionFailureHandler!=null) { assertionFailureHandler.accept(s); } } } @Override public void flush() { if (sessionStarted) { for (RawLogger rl: rawLoggers) { rl.flush(); } } } // Not for use outside the containing class. void pri0(String msgType, String s) { rawLog(PRI0, INFO, msgType, s); } private void rawLog(int pri, String cat, String msgType, String msg) { // Example: // _sid:989, _sn:1, _ts: 120, _co: .b, _pri:1, _sev:INFO, _ty:OTHER, Hello world! // Note that sessionStarted is defined in the containing class - StructuredLogger! if (!sessionStarted) { return; // ******************** EARLY RETURN ****************** } msgType = scrubName(msgType); msg = scrubMessage(msg); // As a special case, if msg contains no colons, we prefix a special _msg key. if (msg.indexOf(StructuredMessageMapper.COLON)==-1) { msg = DEF_MSG + StructuredMessageMapper.COLON + msg; } int curSeq = seqNo.incrementAndGet(); long timestamp = System.currentTimeMillis() - sessionStart; String output = String.format("%s:%s %s:%s %s:%s %s:%s %s:%s %s:%s %s:%s %s", Log.SESSION_ID, sessionId, Log.SEQ_NO, curSeq, Log.TIMESTAMP, timestamp, Log.COMPONENT, component, Log.PRI, pri, Log.CAT, cat, Log.TYPE, msgType, msg ); if (sessionStarted) { for (RawLogger rl: rawLoggers) { rl.log(pri, cat, output); } } } @Override public void err(String msgType, String s) { // TODO Auto-generated method stub } @Override public void warn(String msgType, String s) { // TODO Auto-generated method stub } @Override public void info(String msgType, String s) { // TODO Auto-generated method stub } @Override public void startRTS() { // TODO Auto-generated method stub } @Override public void stopRTS() { // TODO Auto-generated method stub } @Override public void addTag(String tag) { // TODO Auto-generated method stub } @Override public void addTag(String tag, String value) { // TODO Auto-generated method stub } @Override public void removeTag(String tag) { // TODO Auto-generated method stub } } // Replace invalid chars by a '#' private static String scrubName(String msgType) { // Presumably this is faster than using a Regex? Not sure. 
return BAD_NAME_PATTERN.matcher(msgType).replaceAll("#"); } // Replace invalid chars by a '#' private static String scrubMessage(String msgType) { return BAD_MSG_PATTERN.matcher(msgType).replaceAll("#"); } private static class FileRawLogger implements RawLogger { final boolean perSessionLog; final boolean append; final File logDirectory; File logFile; final String prefix; final String suffix; BufferedWriter out; boolean logErrorNotified; // we generate on err msg on write error. // Logger that creates per-session log files public FileRawLogger(File _logDirectory, String _prefix, String _suffix, boolean _append) { perSessionLog = true; logDirectory = _logDirectory; prefix = _prefix; suffix = _suffix; append = _append; // We don't throw any exceptions on error, just write the error to the err console. if (!logDirectory.canWrite()) { System.err.println(String.format( "FileRawLogger: log directory {%s} cannot be written to.", logDirectory.getAbsolutePath() )); } } // Logger that logs to a single log file public FileRawLogger(File _logFile, boolean _append) { perSessionLog = false; logDirectory = null; logFile = _logFile; prefix = null; suffix = null; append = _append; } @Override public void beginSession(String sessionId) { if (perSessionLog) { String name = prefix + sessionId + suffix; logFile = new File(logDirectory, name); } try { out = new BufferedWriter(new FileWriter(logFile, append)); } catch (IOException e) { System.err.println(String.format( "FileRawLogger: Cannot log. Could not create/open log file {%s}. Exception: %s", logDirectory.getAbsolutePath(), e)); out = null; } } @Override public void log(int pri, String cat, String msg) { try { if (out !=null ) { out.write(msg, 0, msg.length()); out.newLine(); } } catch (IOException e) { if (!logErrorNotified) { System.err.println(String.format("FileRawLogger: could not write to log file {%s}. Exception: %s", logFile.getAbsolutePath(), e)); logErrorNotified = true; } } } @Override public void flush() { try { if (out != null) { out.flush(); } } catch (IOException e) { System.err.println(String.format("FileRawLogger: could not flush log file {%s}. Exception: %s", logFile.getAbsolutePath(), e)); } } @Override public void close() { try { if (out != null) { out.close(); } } catch (IOException e) { System.err.println(String.format("FileRawLogger: could not close log file {%s}. Exception: %s", logFile.getAbsolutePath(), e)); } } } private static class UDPRawLogger implements RawLogger { final String destAddress; final int destPort; boolean logErrorNotified; // we generate one err msg if there is an error message on write.. DatagramSocket clientSocket; InetAddress destIPAddress; boolean canLog = false; // Logger that logs by sending UDP traffic to the specified address and port. public UDPRawLogger(String _address, int _port) { destAddress = _address; destPort = _port; } @Override public void beginSession(String sessionId) { try { clientSocket = new DatagramSocket(); destIPAddress = InetAddress.getByName(destAddress); } catch (SocketException e) { System.err.println("UDPRawLogger: Cannot log. Could not create DatagramSocket. Exception: " + e); } catch (UnknownHostException e) { System.err.println("UDPRawLogger: Cannot log. Could not resolve address " + destAddress + ". 
Exception: " + e); } System.out.println(String.format("UDPRawLogger: logging session %s to IP Address %s, port %d", sessionId, destIPAddress, destPort)); canLog = true; } @Override public void log(int pri, String cat, String msg) { try { if (canLog) { byte[] sendData = msg.getBytes(); DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, destIPAddress, destPort); clientSocket.send(sendPacket); } } catch (IOException e) { if (!logErrorNotified) { System.err.println(String.format("UDPRawLogger: could not send msg to IP Address %s, port %d. Exception: %s", destIPAddress.toString(), destPort, e)); logErrorNotified = true; } } } @Override public void flush() { // Nothing to do as we don't buffer messages. } @Override public void close() { if (clientSocket != null) { clientSocket.close(); clientSocket = null; } } } /** * Creates a logger that generates per-session log files of the form {perfix}{session id}{suffix}. * No IOExceptions are thrown. Instead error messages are written to System.err. * @param logDirectory - directory where log files will reside. * @param prefix - filename prefix * @param suffix - filename suffix * @param append - true: append to the file if it exist; false: overwrite the file if it exists. * @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createFileLogger(File logDirectory, String prefix, String suffix, boolean append) { FileRawLogger fileLogger = new FileRawLogger(logDirectory, prefix, suffix, append); return fileLogger; } /** * Creates a logger that logs multiple sessions to a single file. * No IOExceptions are thrown. Instead error messages are written to System.err. * @param logFile - File object representing log file path. * @param append - true: append to the file if it exist; false: overwrite the file if it exists. * @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createFileLogger(File logFile, boolean append) { FileRawLogger fileLogger = new FileRawLogger(logFile, append); return fileLogger; } /** * Creates a logger that transmits log messages as UDP packets to the specified destination. * @param address - Destination host name or IP Address * @param port - Destination port. * @return A StructuredLogger.Logger object that may be passed into a StructuredLogger constructor */ public static RawLogger createUDPLogger(String address, int port) { UDPRawLogger fileLogger = new UDPRawLogger(address, port); return fileLogger; } }
StructuredLogger: added err, warn, info to the base StructuredLogger This is for convenience - so simple logging requires the fewest steps. Also: route internal calls that create a new log through a common method in the StructuredLogger (commonNewLog). This is so that, in the future, we can do some global bookkeeping when creating new logs, such as adding them to a global list of logs. For the present, however, we do not keep track of created logs.
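The commonNewLog routing is described in the commit message but not shown in this record; a rough sketch of the pattern it describes, with hypothetical names and bodies that are not taken from the repository, could look like:

import java.util.ArrayList;
import java.util.List;

class CommonNewLogSketch {
    interface Log { /* logging methods elided */ }

    private final List<Log> createdLogs = new ArrayList<>();

    // All creation paths (defaultLog, Log.newLog, ...) would route through this one method,
    // so global behavior can later be added in a single place.
    Log commonNewLog(String name) {
        Log log = new Log() { };     // stand-in for the concrete LogImplementation
        // createdLogs.add(log);     // not done "for the present", per the commit message
        return log;
    }
}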
java_robotutils/src/com/rinworks/robotutils/StructuredLogger.java
StructuredLogger: added err, warn, info to the base StructuredLogger
Java
mit
ac7a40a93500d18f792c44e0d9ec5e3c2075f5f8
0
eaglesakura/onactivityresult-invoke,eaglesakura/onactivityresult-invoke
package com.eaglesakura.android.oari;

import android.content.Intent;
import android.support.v4.app.Fragment;

import java.lang.reflect.Method;
import java.util.List;

/**
 * Utility for automatically calling methods annotated with "@OnActivityResult"
 */
public class ActivityResult {

    private static boolean invoke(Method[] methods, Object sender, int requestCode, int resultCode, Intent data) {
        if (methods == null || methods.length == 0) {
            return false;
        }

        for (Method m : methods) {
            OnActivityResult onActivityResult = m.getAnnotation(OnActivityResult.class);
            if (onActivityResult != null && onActivityResult.value() == requestCode) {
                // Found the request code to handle, so invoke the method
                try {
                    m.setAccessible(true);
                    m.invoke(sender, resultCode, data);
                    return true;
                } catch (Exception e) {
                    e.printStackTrace();
                    return false;
                }
            }
        }

        return false;
    }

    /**
     * Invokes onActivityResult handling
     *
     * @param sender the object whose handler should be invoked
     * @return true if the call was handled
     */
    public static boolean invoke(Object sender, int requestCode, int resultCode, Intent data) {
        Class clazz = sender.getClass();

        // If a public method can handle the call, that is sufficient
        if (invoke(clazz.getMethods(), sender, requestCode, resultCode, data)) {
            return true;
        }

        // Otherwise walk up the class hierarchy looking for a suitable method
        while (!clazz.equals(Object.class)) {
            if (invoke(clazz.getDeclaredMethods(), sender, requestCode, resultCode, data)) {
                return true;
            }
            clazz = clazz.getSuperclass();
        }

        return false;
    }

    /**
     * @param sender the Fragment whose handler should be invoked
     */
    public static boolean invokeRecursive(Fragment sender, int requestCode, int resultCode, Intent data) {
        if (invoke(sender, requestCode, resultCode, data)) {
            return true;
        }

        // For a Fragment, also search its children for a handler
        if (sender instanceof Fragment) {
            List<Fragment> fragments = sender.getChildFragmentManager().getFragments();
            if (fragments != null) {
                for (Fragment child : fragments) {
                    if (invokeRecursive(child, requestCode, resultCode, data)) {
                        // A child handled the result
                        return true;
                    }
                }
            }
        }

        return false;
    }
}
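A minimal usage sketch of this utility from a host Fragment; the request code, the class name PickerFragment, and the handler method name are invented for illustration and are not taken from the repository.

import android.content.Intent;
import android.support.v4.app.Fragment;

import com.eaglesakura.android.oari.ActivityResult;
import com.eaglesakura.android.oari.OnActivityResult;

public class PickerFragment extends Fragment {
    static final int REQUEST_PICK_IMAGE = 0x1201;

    // Handler signature matches how ActivityResult invokes it: (resultCode, data)
    @OnActivityResult(REQUEST_PICK_IMAGE)
    void onPickedImage(int resultCode, Intent data) {
        // handle the picked image here
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Dispatch to the annotated handler (searching child Fragments too);
        // fall back to the default handling if nothing matched.
        if (!ActivityResult.invokeRecursive(this, requestCode, resultCode, data)) {
            super.onActivityResult(requestCode, resultCode, data);
        }
    }
}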
src/main/java/com/eaglesakura/android/oari/ActivityResult.java
package com.eaglesakura.android.oari;

import android.content.Intent;
import android.support.v4.app.Fragment;

import java.lang.reflect.Method;
import java.util.List;

/**
 * Utility for automatically calling methods annotated with "@OnActivityResult"
 */
public class ActivityResult {

    private static boolean invoke(Method[] methods, Object sender, int requestCode, int resultCode, Intent data) {
        if (methods == null || methods.length == 0) {
            return false;
        }

        for (Method m : methods) {
            OnActivityResult onActivityResult = m.getAnnotation(OnActivityResult.class);
            if (onActivityResult != null && onActivityResult.value() == requestCode) {
                // Found the request code to handle, so invoke the method
                try {
                    m.setAccessible(true);
                    m.invoke(sender, resultCode, data);
                    return true;
                } catch (Exception e) {
                    e.printStackTrace();
                    return false;
                }
            }
        }

        return false;
    }

    /**
     * Invokes onActivityResult handling
     *
     * @param sender the object whose handler should be invoked
     * @return true if the call was handled
     */
    public static boolean invoke(Object sender, int requestCode, int resultCode, Intent data) {
        Class clazz = sender.getClass();

        // If a public method can handle the call, that is sufficient
        if (invoke(clazz.getMethods(), sender, requestCode, resultCode, data)) {
            return true;
        }

        // Otherwise walk up the class hierarchy looking for a suitable method
        while (!clazz.equals(Object.class)) {
            if (invoke(clazz.getDeclaredMethods(), sender, requestCode, resultCode, data)) {
                return true;
            }
            clazz = clazz.getSuperclass();
        }

        return false;
    }

    /**
     * @param sender the Fragment whose handler should be invoked
     */
    public static boolean invokeRecursive(Fragment sender, int requestCode, int resultCode, Intent data) {
        if (invoke(sender, requestCode, resultCode, data)) {
            return true;
        }

        // For a Fragment, also search its children for a handler
        if (sender instanceof Fragment) {
            List<Fragment> fragments = sender.getChildFragmentManager().getFragments();
            if (fragments != null) {
                for (Fragment child : fragments) {
                    if (invoke(child, requestCode, resultCode, data)) {
                        // A child handled the result
                        return true;
                    }
                }
            }
        }

        return false;
    }
}
Fixed a bug where Fragment recursion was not performed correctly
src/main/java/com/eaglesakura/android/oari/ActivityResult.java
Fixed a bug where Fragment recursion was not performed correctly
Java
mit
30e5657775f45421ff43c6ba01da8f991bb6bf63
0
markovandooren/jlo
package subobjectjava.translate; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import jnome.core.expression.ConstructorInvocation; import jnome.core.language.Java; import jnome.core.type.JavaTypeReference; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.rejuse.association.Association; import org.rejuse.association.SingleAssociation; import org.rejuse.logic.ternary.Ternary; import org.rejuse.predicate.UnsafePredicate; import subobjectjava.input.SubobjectJavaModelFactory; import subobjectjava.model.component.ComponentRelation; import subobjectjava.model.component.ConfigurationBlock; import subobjectjava.model.component.ConfigurationClause; import subobjectjava.model.component.OverridesClause; import subobjectjava.model.expression.SubobjectConstructorCall; import subobjectjava.model.language.SubobjectJava; import subobjectjava.model.language.SubobjectJavaOverridesRelation; import chameleon.core.Config; import chameleon.core.declaration.CompositeQualifiedName; import chameleon.core.declaration.Declaration; import chameleon.core.declaration.QualifiedName; import chameleon.core.declaration.Signature; import chameleon.core.declaration.SimpleNameSignature; import chameleon.core.element.Element; import chameleon.core.expression.ActualArgument; import chameleon.core.expression.Invocation; import chameleon.core.expression.NamedTargetExpression; import chameleon.core.language.Language; import chameleon.core.lookup.LookupException; import chameleon.core.member.Member; import chameleon.core.method.Method; import chameleon.core.method.RegularImplementation; import chameleon.core.method.RegularMethod; import chameleon.core.method.exception.ExceptionClause; import chameleon.core.modifier.Modifier; import chameleon.core.namespace.Namespace; import chameleon.core.namespace.RootNamespace; import chameleon.core.namespacepart.Import; import chameleon.core.namespacepart.NamespacePart; import chameleon.core.reference.SimpleReference; import chameleon.core.statement.Block; import chameleon.core.type.RegularType; import chameleon.core.type.Type; import chameleon.core.type.TypeReference; import chameleon.core.type.generics.InstantiatedTypeParameter; import chameleon.core.type.generics.TypeParameter; import chameleon.core.type.inheritance.SubtypeRelation; import chameleon.core.variable.FormalParameter; import chameleon.exception.ChameleonProgrammerException; import chameleon.input.ParseException; import chameleon.oo.language.ObjectOrientedLanguage; import chameleon.support.expression.AssignmentExpression; import chameleon.support.expression.SuperConstructorDelegation; import chameleon.support.expression.SuperTarget; import chameleon.support.expression.ThisLiteral; import chameleon.support.member.simplename.SimpleNameMethodHeader; import chameleon.support.member.simplename.SimpleNameMethodSignature; import chameleon.support.member.simplename.method.NormalMethod; import chameleon.support.member.simplename.method.RegularMethodInvocation; import chameleon.support.member.simplename.variable.MemberVariableDeclarator; import chameleon.support.modifier.Protected; import chameleon.support.modifier.Public; import chameleon.support.statement.ReturnStatement; import chameleon.support.statement.StatementExpression; import chameleon.support.variable.VariableDeclaration; import chameleon.test.provider.BasicDescendantProvider; import 
chameleon.test.provider.ElementProvider; public class JavaTranslator { public JavaTranslator(SubobjectJava language, ElementProvider<Namespace> namespaceProvider) throws ParseException, IOException { super(); _language = language; _typeProvider = new BasicDescendantProvider<Type>(namespaceProvider, Type.class); } public ElementProvider<Type> typeProvider() { return _typeProvider; } private ElementProvider<Type> _typeProvider; public Java translate() throws ParseException, IOException, ChameleonProgrammerException, LookupException { RootNamespace clone = language().defaultNamespace().clone(); Java result = new Java(); result.cloneConnectorsFrom(language()); result.cloneProcessorsFrom(language()); result.setDefaultNamespace(clone); Map<Type,Type> map = new HashMap<Type,Type>(); for(Type type: typeProvider().elements(result)) { Type newType = translation(type); map.put(newType, type); } for(Entry<Type,Type> entry : map.entrySet()) { SingleAssociation newParentlink = entry.getKey().parentLink(); SingleAssociation oldParentlink = entry.getValue().parentLink(); Association childLink = oldParentlink.getOtherRelation(); childLink.replace(oldParentlink, newParentlink); } return result; } public Language language() throws ParseException, IOException { return _language; } private Language _language; /** * Return a type that represents the translation of the given JLow class to a Java class. */ public Type translation(Type original) throws ChameleonProgrammerException, LookupException { Type type = original.clone(); List<ComponentRelation> relations = original.directlyDeclaredMembers(ComponentRelation.class); for(ComponentRelation relation : relations) { //ensureTranslation(relation.componentType()); // Add a field subobject MemberVariableDeclarator fieldForComponent = fieldForComponent(relation,type); if(fieldForComponent != null) { type.add(fieldForComponent); } // Add a getter for subobject Method getterForComponent = getterForComponent(relation,type); if(getterForComponent != null) { type.add(getterForComponent); } // Add a setter for subobject Method setterForComponent = setterForComponent(relation,type); if(setterForComponent != null) { type.add(setterForComponent); } type.addAll(aliasMethods(relation)); // Create the inner classes for the components inner(type, relation, type); addOutwardDelegations(relation, type); // Replace constructor calls } for(ComponentRelation relation: type.directlyDeclaredMembers(ComponentRelation.class)) { type.setUniParent(original.parent()); replaceSuperCalls(relation, type); replaceConstructorCalls(relation); type.setUniParent(null); relation.disconnect(); } return type; } public void replaceConstructorCalls(final ComponentRelation relation) throws LookupException { Type type = relation.nearestAncestor(Type.class); List<SubobjectConstructorCall> constructorCalls = type.descendants(SubobjectConstructorCall.class, new UnsafePredicate<SubobjectConstructorCall,LookupException>() { @Override public boolean eval(SubobjectConstructorCall constructorCall) throws LookupException { return constructorCall.getTarget().getElement().equals(relation); } } ); for(SubobjectConstructorCall call: constructorCalls) { Invocation inv = new ConstructorInvocation(innerClassTypeReference(relation, type), null); // move actual arguments from subobject constructor call to new constructor call. 
inv.addAllArguments(call.actualArgumentList().getActualParameters()); Invocation setterCall = new RegularMethodInvocation(setterName(relation), null); setterCall.addArgument(new ActualArgument(inv)); SingleAssociation<SubobjectConstructorCall, Element> parentLink = call.parentLink(); parentLink.getOtherRelation().replace(parentLink, setterCall.parentLink()); } } public void inner(Type type, ComponentRelation relation, Type outer) throws LookupException { Type innerClass = createInnerClassFor(relation,type); //_innerClassMap.put(relation, type); type.add(innerClass); Type componentType = relation.componentType(); for(ComponentRelation nestedRelation: componentType.members(ComponentRelation.class)) { // subst parameters ComponentRelation clonedNestedRelation = nestedRelation.clone(); clonedNestedRelation.setUniParent(nestedRelation.parent()); substituteTypeParameters(clonedNestedRelation, componentType); inner(innerClass, clonedNestedRelation, outer); } } private Map<ComponentRelation, Type> _innerClassMap = new HashMap<ComponentRelation, Type>(); public void addOutwardDelegations(ComponentRelation relation, Type outer) throws LookupException { ConfigurationBlock block = relation.configurationBlock(); for(ConfigurationClause clause: block.clauses()) { if(clause instanceof OverridesClause) { OverridesClause ov = (OverridesClause)clause; QualifiedName qn = ov.oldFqn(); QualifiedName poppedName = qn.popped(); int size = poppedName.length(); Element container = relation.componentType(); if(size > 0) { SimpleReference<Declaration> ref = new SimpleReference<Declaration>(poppedName, Declaration.class); ref.setUniParent(relation.componentType()); container = ref.getElement(); } Signature lastSignature = qn.lastSignature(); SimpleReference<Declaration> ref = new SimpleReference<Declaration>(null, lastSignature.clone(), Declaration.class); Type targetInnerClass = targetInnerClass(outer, relation, poppedName); ref.setUniParent(container); Declaration decl = ref.getElement(); if(decl instanceof Method) { Method<?,?,?,?> method = (Method<?, ?, ?, ?>) decl; Method original = createOriginal(method, original(method.name())); if(original != null) { targetInnerClass.add(original); } Method outward = createOutward(method,((SimpleNameMethodSignature)ov.newSignature()).name(),relation); if(outward != null) { targetInnerClass.add(outward); } } // oldName(....) 
{ // DirectOuterClass.this.newName(); // } } } } public Type targetInnerClass(Type outer, ComponentRelation relation, QualifiedName poppedName) throws LookupException { List<Signature> sigs = new ArrayList<Signature>(); sigs.add(relation.signature()); sigs.addAll(poppedName.signatures()); CompositeQualifiedName innerName = new CompositeQualifiedName(); CompositeQualifiedName acc = new CompositeQualifiedName(); // innerName.append(outer.signature().clone()); for(Signature signature: sigs) { acc.append(signature.clone()); innerName.append(new SimpleNameSignature(innerClassName(outer, acc))); } SimpleReference<Type> tref = new SimpleReference<Type>(innerName, Type.class); tref.setUniParent(outer); outer.setUniParent(relation.nearestAncestor(Type.class).parent()); Type result = tref.getElement(); outer.setUniParent(null); return result; } // public void addOutwardDelegations(ComponentRelation relation, Type innerClass) throws LookupException { // ConfigurationBlock block = relation.configurationBlock(); // for(ConfigurationClause clause: block.clauses()) { // if(clause instanceof OverridesClause) { // OverridesClause ov = (OverridesClause)clause; // QualifiedName qn = ov.oldFqn(); // QualifiedName poppedName = qn.popped(); // int size = poppedName.length(); // Element container = innerClass; // if(size > 0) { // SimpleReference<Declaration> ref = new SimpleReference<Declaration>(poppedName, Declaration.class); // ref.setUniParent(relation.componentType()); // container = ref.getElement(); // } // Signature lastSignature = qn.lastSignature(); // SimpleReference<Declaration> ref = new SimpleReference<Declaration>(null, lastSignature.clone(), Declaration.class); // ref.setUniParent(container); // Declaration decl = ref.getElement(); // if(decl instanceof Method) { // Method<?,?,?,?> method = (Method<?, ?, ?, ?>) decl; // Method original = createOriginal(method, original(method.name())); // if(original != null) { // innerClass.add(original); // } // Method outward = createOutward(method,((SimpleNameMethodSignature)ov.newSignature()).name(),relation); // if(outward != null) { // innerClass.add(outward); // } // } // // oldName(....) { // // DirectOuterClass.this.newName(); // // } // } // } // } /** * * @param relation A component relation from either the original class, or one of its nested components. * @param outer The outer class being generated. 
*/ public Type createInnerClassFor(ComponentRelation relation, Type outer) throws ChameleonProgrammerException, LookupException { NamespacePart nsp = relation.furthestAncestor(NamespacePart.class); // Type parentType = relation.nearestAncestor(Type.class); RegularType componentType = (RegularType) relation.componentType(); NamespacePart originalNsp = componentType.furthestAncestor(NamespacePart.class); for(Import imp: originalNsp.imports()) { nsp.addImport(imp.clone()); } Type stub = new RegularType(innerClassName(relation, outer)); TypeReference superReference; if(relation.nearestAncestor(Type.class).signature().equals(outer.signature()) && (outer.nearestAncestor(Type.class) == null)) { superReference = relation.componentTypeReference().clone(); } else { String innerClassName = innerClassName(relation, relation.nearestAncestor(Type.class)); superReference = new JavaTypeReference(innerClassName); } stub.addInheritanceRelation(new SubtypeRelation(superReference)); List<Method> localMethods = componentType.directlyDeclaredMembers(Method.class); for(Method<?,?,?,?> method: localMethods) { if(method.is(method.language(ObjectOrientedLanguage.class).CONSTRUCTOR) == Ternary.TRUE) { NormalMethod clone = (NormalMethod) method.clone(); String name = stub.signature().name(); RegularImplementation impl = (RegularImplementation) clone.implementation(); Block block = new Block(); impl.setBody(block); // substitute parameters before replace the return type, method name, and the body. // the types are not known in the component type, and the super class of the component type // may not have a constructor with the same signature as the current constructor. substituteTypeParameters(method, clone); Invocation inv = new SuperConstructorDelegation(); useParametersInInvocation(clone, inv); block.addStatement(new StatementExpression(inv)); clone.setReturnTypeReference(new JavaTypeReference(name)); ((SimpleNameMethodHeader)clone.header()).setName(name); stub.add(clone); } } return stub; } public final static String SHADOW = "_subobject_"; public Method createOutward(Method<?,?,?,?> method, String newName, ComponentRelation relation) throws LookupException { NormalMethod<?,?,?> result; if((method.is(method.language(ObjectOrientedLanguage.class).DEFINED) == Ternary.TRUE) && (method.is(method.language(ObjectOrientedLanguage.class).OVERRIDABLE) == Ternary.TRUE)) { result = innerMethod(method, method.name()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(result, newName); TypeReference ref = getRelativeClassName(relation); ThisLiteral target = new ThisLiteral(ref); invocation.setTarget(target); substituteTypeParameters(method, result); addImplementation(method, body, invocation); } else { result = null; } return result; } public TypeReference getRelativeClassName(ComponentRelation relation) { return new JavaTypeReference(relation.nearestAncestor(Type.class).signature().name()); } public Method createOriginal(Method<?,?,?,?> method, String original) throws LookupException { NormalMethod<?,?,?> result; if((method.is(method.language(ObjectOrientedLanguage.class).DEFINED) == Ternary.TRUE) && (method.is(method.language(ObjectOrientedLanguage.class).OVERRIDABLE) == Ternary.TRUE)) { result = innerMethod(method, original); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(result, method.name()); invocation.setTarget(new SuperTarget()); addImplementation(method, body, invocation); 
substituteTypeParameters(method, result); // List<CrossReference> crossReferences = // result.descendants(CrossReference.class, // new UnsafePredicate<CrossReference,LookupException>() { // // @Override // public boolean eval(CrossReference object) throws LookupException { // return object.getElement().equals(selectionDeclaration()); // } // // }); } else { result = null; } return result; } private void substituteTypeParameters(Method<?, ?, ?, ?> methodInTypeWhoseParametersMustBeSubstituted, NormalMethod<?, ?, ?> methodWhereActualTypeParametersMustBeFilledIn) throws LookupException { methodWhereActualTypeParametersMustBeFilledIn.setUniParent(methodInTypeWhoseParametersMustBeSubstituted); Type type = methodInTypeWhoseParametersMustBeSubstituted.nearestAncestor(Type.class); substituteTypeParameters(methodWhereActualTypeParametersMustBeFilledIn, type); methodWhereActualTypeParametersMustBeFilledIn.setUniParent(null); } private void addImplementation(Method<?, ?, ?, ?> method, Block body, Invocation invocation) throws LookupException { if(method.returnType().equals(method.language(Java.class).voidType())) { body.addStatement(new StatementExpression(invocation)); } else { body.addStatement(new ReturnStatement(invocation)); } } private NormalMethod<?, ?, ?> innerMethod(Method<?, ?, ?, ?> method, String original) { NormalMethod<?, ?, ?> result; result = new NormalMethod(method.header().clone(), method.getReturnTypeReference().clone()); ((SimpleNameMethodHeader)result.header()).setName(original); ExceptionClause exceptionClause = method.getExceptionClause(); ExceptionClause clone = (exceptionClause != null ? exceptionClause.clone(): null); result.setExceptionClause(clone); result.addModifier(new Public()); return result; } public void substituteTypeParameters(Element<?, ?> result, Type type) throws LookupException { List<TypeParameter> typeParameters = type.parameters(); for(TypeParameter par: typeParameters) { if(par instanceof InstantiatedTypeParameter) { ((InstantiatedTypeParameter)par).substitute(result); } } } public String innerClassName(Type outer, QualifiedName qn) { StringBuffer result = new StringBuffer(); result.append(outer.signature().name()); result.append(SHADOW); List<Signature> sigs = qn.signatures(); int size = sigs.size(); for(int i = 0; i < size; i++) { result.append(((SimpleNameSignature)sigs.get(i)).name()); if(i < size - 1) { result.append(SHADOW); } } return result.toString(); } public String innerClassName(ComponentRelation relation, Type outer) throws LookupException { // return outer.signature().name()+"_"+relation.componentType().baseType().signature().name()+SHADOW+relation.signature().name(); //return outer.signature().name()+SHADOW+relation.signature().name(); return innerClassName(outer, relation.signature()); } public void replaceSuperCalls(final ComponentRelation relation, Type parent) throws LookupException { List<SuperTarget> superTargets = parent.descendants(SuperTarget.class, new UnsafePredicate<SuperTarget,LookupException>() { @Override public boolean eval(SuperTarget superTarget) throws LookupException { return superTarget.getTargetDeclaration().equals(relation); } } ); for(SuperTarget superTarget: superTargets) { Element<?,?> inv = superTarget.parent(); if(inv instanceof RegularMethodInvocation) { RegularMethodInvocation call = (RegularMethodInvocation) inv; Invocation subObjectSelection = new RegularMethodInvocation(getterName(relation), null); call.setTarget(subObjectSelection); call.setName(original(call.name())); } } } public String original(String 
name) { return "original__"+name; } public MemberVariableDeclarator fieldForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { // MemberVariableDeclarator result = new MemberVariableDeclarator(relation.componentTypeReference().clone()); MemberVariableDeclarator result = new MemberVariableDeclarator(innerClassTypeReference(relation, outer)); result.add(new VariableDeclaration(fieldName(relation))); return result; } else { return null; } } private JavaTypeReference innerClassTypeReference(ComponentRelation relation, Type outer) throws LookupException { return new JavaTypeReference(innerClassName(relation, outer)); } public String getterName(ComponentRelation relation) { return relation.signature().name()+COMPONENT; } public final static String COMPONENT = "__component__lkjkberfuncye__"; public Method getterForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { RegularMethod result = new NormalMethod(new SimpleNameMethodHeader(getterName(relation)), innerClassTypeReference(relation, outer)); result.addModifier(new Public()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); body.addStatement(new ReturnStatement(new NamedTargetExpression(fieldName(relation), null))); return result; } else { return null; } } public String setterName(ComponentRelation relation) { return "set"+COMPONENT+"__"+relation.signature().name(); } public Method setterForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { String name = relation.signature().name(); RegularMethod result = new NormalMethod(new SimpleNameMethodHeader(setterName(relation)), new JavaTypeReference("void")); result.header().addParameter(new FormalParameter(name, innerClassTypeReference(relation,outer))); result.addModifier(new Protected()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); NamedTargetExpression componentFieldRef = new NamedTargetExpression(fieldName(relation), null); body.addStatement(new StatementExpression(new AssignmentExpression(componentFieldRef, new NamedTargetExpression(name, null)))); return result; } else { return null; } } private boolean overrides(ComponentRelation relation) throws LookupException { Type type = relation.nearestAncestor(Type.class); for(Type superType: type.getDirectSuperTypes()) { List<ComponentRelation> superComponents = superType.members(ComponentRelation.class); for(ComponentRelation superComponent: superComponents) { if(new SubobjectJavaOverridesRelation().contains(relation, superComponent)) { return true; } } } return false; } public List<Method> aliasMethods(ComponentRelation relation) throws LookupException { List<Method> result = new ArrayList<Method>(); List<? 
extends Member> members = relation.getIntroducedMembers(); members.remove(relation); for(Member member: members) { result.add(aliasFor(member, relation)); } return result; } public Method aliasFor(Member member, ComponentRelation relation) throws LookupException{ if(member instanceof Method) { Method<?,?,?,?> method = (Method) member; Method<?,?,?,?> origin = (Method) method.origin(); String methodName = fieldName(relation); Method result = new NormalMethod(method.header().clone(), new JavaTypeReference(method.returnType().getFullyQualifiedName())); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(method, origin.name()); invocation.setTarget(new NamedTargetExpression(methodName, null)); if(origin.returnType().equals(origin.language(ObjectOrientedLanguage.class).voidType())) { body.addStatement(new StatementExpression(invocation)); } else { body.addStatement(new ReturnStatement(invocation)); } for(Modifier mod: origin.modifiers()) { result.addModifier(mod.clone()); } return result; } else { throw new ChameleonProgrammerException("Translation of member of type "+member.getClass().getName()+" not supported."); } } private Invocation invocation(Method<?, ?, ?, ?> method, String origin) { Invocation invocation = new RegularMethodInvocation(origin, null); // pass parameters. useParametersInInvocation(method, invocation); return invocation; } private void useParametersInInvocation(Method<?, ?, ?, ?> method, Invocation invocation) { for(FormalParameter param: method.formalParameters()) { invocation.addArgument(new ActualArgument(new NamedTargetExpression(param.signature().name(), null))); } } public String fieldName(ComponentRelation relation) { return "__component_" + relation.signature().name(); } /** * args[0] = path for the directory to write output * args[1] = path to read input files * ...1 or more input paths possible... * args[i] = fqn of package to read, let this start with "@" to read the package recursively *...1 or more packageFqns possible... * args[n] = fqn of package to read, let this start with "#" to NOT read the package recursively. *...1 or more packageFqns possible... * * Example * java Copy outputDir baseInputDir customInputDir1 customInputDir2 @myPackage.subPackage */ public static void main(String[] args) throws Exception { if(args.length < 2) { System.out.println("Usage: java .... JavaTranslator outputDir inputDir* @recursivePackageFQN* #packageFQN* $typeFQN*"); } BasicConfigurator.configure(); Logger.getRootLogger().setLevel(Level.FATAL); Config.setCacheLanguage(true); Config.setCacheElementReferences(true); Config.setCacheElementProperties(true); ProviderProvider provider = new ProviderProvider(new SubobjectJavaModelFactory(),".java",true,true); provider.processArguments(args); long start = System.currentTimeMillis(); Java result = new JavaTranslator((SubobjectJava) provider.language(), provider.namespaceProvider()).translate(); // Output long stop = System.currentTimeMillis(); File outputDir = provider.outputDir(); TypeWriter writer = new TypeWriter(result, new BasicDescendantProvider<Type>(provider.namespaceProvider(), Type.class),outputDir); writer.write(); System.out.println("Translation took "+(stop - start) + " milliseconds."); } }
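To make the transformation concrete, here is a rough, hypothetical sketch of the Java the translator produces for a single subobject; the class names Radio and Tuner are invented, while the generated member names follow the fieldName(), getterName(), setterName() and innerClassName() conventions defined in the code above.

class Tuner {
    public Tuner() { }
}

public class Radio {

    // inner class produced by createInnerClassFor(...), subclassing the component type
    public class Radio_subobject_tuner extends Tuner {
        public Radio_subobject_tuner() { super(); }
    }

    // field produced by fieldForComponent(...)
    Radio_subobject_tuner __component_tuner;

    // getter produced by getterForComponent(...)
    public Radio_subobject_tuner tuner__component__lkjkberfuncye__() {
        return __component_tuner;
    }

    // setter produced by setterForComponent(...); replaceConstructorCalls(...) rewrites a
    // subobject constructor call into set__component__lkjkberfuncye____tuner(new Radio_subobject_tuner(...))
    protected void set__component__lkjkberfuncye____tuner(Radio_subobject_tuner tuner) {
        __component_tuner = tuner;
    }
}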
src/subobjectjava/translate/JavaTranslator.java
package subobjectjava.translate; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import jnome.core.expression.ConstructorInvocation; import jnome.core.language.Java; import jnome.core.type.JavaTypeReference; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.rejuse.association.Association; import org.rejuse.association.SingleAssociation; import org.rejuse.logic.ternary.Ternary; import org.rejuse.predicate.UnsafePredicate; import subobjectjava.input.SubobjectJavaModelFactory; import subobjectjava.model.component.ComponentRelation; import subobjectjava.model.component.ConfigurationBlock; import subobjectjava.model.component.ConfigurationClause; import subobjectjava.model.component.OverridesClause; import subobjectjava.model.expression.SubobjectConstructorCall; import subobjectjava.model.language.SubobjectJava; import subobjectjava.model.language.SubobjectJavaOverridesRelation; import chameleon.core.Config; import chameleon.core.declaration.CompositeQualifiedName; import chameleon.core.declaration.Declaration; import chameleon.core.declaration.QualifiedName; import chameleon.core.declaration.Signature; import chameleon.core.declaration.SimpleNameSignature; import chameleon.core.element.Element; import chameleon.core.expression.ActualArgument; import chameleon.core.expression.Invocation; import chameleon.core.expression.NamedTargetExpression; import chameleon.core.language.Language; import chameleon.core.lookup.LookupException; import chameleon.core.member.Member; import chameleon.core.method.Method; import chameleon.core.method.RegularImplementation; import chameleon.core.method.RegularMethod; import chameleon.core.method.exception.ExceptionClause; import chameleon.core.modifier.Modifier; import chameleon.core.namespace.Namespace; import chameleon.core.namespace.RootNamespace; import chameleon.core.namespacepart.Import; import chameleon.core.namespacepart.NamespacePart; import chameleon.core.reference.SimpleReference; import chameleon.core.statement.Block; import chameleon.core.type.RegularType; import chameleon.core.type.Type; import chameleon.core.type.TypeReference; import chameleon.core.type.generics.InstantiatedTypeParameter; import chameleon.core.type.generics.TypeParameter; import chameleon.core.type.inheritance.SubtypeRelation; import chameleon.core.variable.FormalParameter; import chameleon.exception.ChameleonProgrammerException; import chameleon.input.ParseException; import chameleon.oo.language.ObjectOrientedLanguage; import chameleon.support.expression.AssignmentExpression; import chameleon.support.expression.SuperConstructorDelegation; import chameleon.support.expression.SuperTarget; import chameleon.support.expression.ThisLiteral; import chameleon.support.member.simplename.SimpleNameMethodHeader; import chameleon.support.member.simplename.SimpleNameMethodSignature; import chameleon.support.member.simplename.method.NormalMethod; import chameleon.support.member.simplename.method.RegularMethodInvocation; import chameleon.support.member.simplename.variable.MemberVariableDeclarator; import chameleon.support.modifier.Protected; import chameleon.support.modifier.Public; import chameleon.support.statement.ReturnStatement; import chameleon.support.statement.StatementExpression; import chameleon.support.variable.VariableDeclaration; import chameleon.test.provider.BasicDescendantProvider; import 
chameleon.test.provider.ElementProvider; public class JavaTranslator { public JavaTranslator(SubobjectJava language, ElementProvider<Namespace> namespaceProvider) throws ParseException, IOException { super(); _language = language; _typeProvider = new BasicDescendantProvider<Type>(namespaceProvider, Type.class); } public ElementProvider<Type> typeProvider() { return _typeProvider; } private ElementProvider<Type> _typeProvider; public Java translate() throws ParseException, IOException, ChameleonProgrammerException, LookupException { RootNamespace clone = language().defaultNamespace().clone(); Java result = new Java(); result.cloneConnectorsFrom(language()); result.cloneProcessorsFrom(language()); result.setDefaultNamespace(clone); Map<Type,Type> map = new HashMap<Type,Type>(); for(Type type: typeProvider().elements(result)) { Type newType = translation(type); map.put(newType, type); } for(Entry<Type,Type> entry : map.entrySet()) { SingleAssociation newParentlink = entry.getKey().parentLink(); SingleAssociation oldParentlink = entry.getValue().parentLink(); Association childLink = oldParentlink.getOtherRelation(); childLink.replace(oldParentlink, newParentlink); } return result; } public Language language() throws ParseException, IOException { return _language; } private Language _language; public Type translation(Type original) throws ChameleonProgrammerException, LookupException { Type type = original.clone(); List<ComponentRelation> relations = original.directlyDeclaredMembers(ComponentRelation.class); for(ComponentRelation relation : relations) { //ensureTranslation(relation.componentType()); // Add a field subobject MemberVariableDeclarator fieldForComponent = fieldForComponent(relation,type); if(fieldForComponent != null) { type.add(fieldForComponent); } // Add a getter for subobject Method getterForComponent = getterForComponent(relation,type); if(getterForComponent != null) { type.add(getterForComponent); } // Add a setter for subobject Method setterForComponent = setterForComponent(relation,type); if(setterForComponent != null) { type.add(setterForComponent); } type.addAll(aliasMethods(relation)); // Create the inner classes for the components inner(type, relation, type); addOutwardDelegations(relation, type); // Replace constructor calls } for(ComponentRelation relation: type.directlyDeclaredMembers(ComponentRelation.class)) { type.setUniParent(original.parent()); replaceSuperCalls(relation, type); replaceConstructorCalls(relation); type.setUniParent(null); relation.disconnect(); } return type; } public void replaceConstructorCalls(final ComponentRelation relation) throws LookupException { Type type = relation.nearestAncestor(Type.class); List<SubobjectConstructorCall> constructorCalls = type.descendants(SubobjectConstructorCall.class, new UnsafePredicate<SubobjectConstructorCall,LookupException>() { @Override public boolean eval(SubobjectConstructorCall constructorCall) throws LookupException { return constructorCall.getTarget().getElement().equals(relation); } } ); for(SubobjectConstructorCall call: constructorCalls) { Invocation inv = new ConstructorInvocation(innerClassTypeReference(relation, type), null); // move actual arguments from subobject constructor call to new constructor call. 
inv.addAllArguments(call.actualArgumentList().getActualParameters()); Invocation setterCall = new RegularMethodInvocation(setterName(relation), null); setterCall.addArgument(new ActualArgument(inv)); SingleAssociation<SubobjectConstructorCall, Element> parentLink = call.parentLink(); parentLink.getOtherRelation().replace(parentLink, setterCall.parentLink()); } } public void inner(Type type, ComponentRelation relation, Type outer) throws LookupException { Type innerClass = createInnerClassFor(relation,type); //_innerClassMap.put(relation, type); type.add(innerClass); Type componentType = relation.componentType(); for(ComponentRelation nestedRelation: componentType.members(ComponentRelation.class)) { // subst parameters ComponentRelation clonedNestedRelation = nestedRelation.clone(); clonedNestedRelation.setUniParent(nestedRelation.parent()); substituteTypeParameters(clonedNestedRelation, componentType); inner(innerClass, clonedNestedRelation, outer); } } private Map<ComponentRelation, Type> _innerClassMap = new HashMap<ComponentRelation, Type>(); public void addOutwardDelegations(ComponentRelation relation, Type outer) throws LookupException { ConfigurationBlock block = relation.configurationBlock(); for(ConfigurationClause clause: block.clauses()) { if(clause instanceof OverridesClause) { OverridesClause ov = (OverridesClause)clause; QualifiedName qn = ov.oldFqn(); QualifiedName poppedName = qn.popped(); int size = poppedName.length(); Element container = relation.componentType(); if(size > 0) { SimpleReference<Declaration> ref = new SimpleReference<Declaration>(poppedName, Declaration.class); ref.setUniParent(relation.componentType()); container = ref.getElement(); } Signature lastSignature = qn.lastSignature(); SimpleReference<Declaration> ref = new SimpleReference<Declaration>(null, lastSignature.clone(), Declaration.class); Type targetInnerClass = targetInnerClass(outer, relation, poppedName); ref.setUniParent(container); Declaration decl = ref.getElement(); if(decl instanceof Method) { Method<?,?,?,?> method = (Method<?, ?, ?, ?>) decl; Method original = createOriginal(method, original(method.name())); if(original != null) { targetInnerClass.add(original); } Method outward = createOutward(method,((SimpleNameMethodSignature)ov.newSignature()).name(),relation); if(outward != null) { targetInnerClass.add(outward); } } // oldName(....) 
{ // DirectOuterClass.this.newName(); // } } } } public Type targetInnerClass(Type outer, ComponentRelation relation, QualifiedName poppedName) throws LookupException { List<Signature> sigs = new ArrayList<Signature>(); sigs.add(relation.signature()); sigs.addAll(poppedName.signatures()); CompositeQualifiedName innerName = new CompositeQualifiedName(); CompositeQualifiedName acc = new CompositeQualifiedName(); // innerName.append(outer.signature().clone()); for(Signature signature: sigs) { acc.append(signature.clone()); innerName.append(new SimpleNameSignature(innerClassName(outer, acc))); } SimpleReference<Type> tref = new SimpleReference<Type>(innerName, Type.class); tref.setUniParent(outer); outer.setUniParent(relation.nearestAncestor(Type.class).parent()); Type result = tref.getElement(); outer.setUniParent(null); return result; } // public void addOutwardDelegations(ComponentRelation relation, Type innerClass) throws LookupException { // ConfigurationBlock block = relation.configurationBlock(); // for(ConfigurationClause clause: block.clauses()) { // if(clause instanceof OverridesClause) { // OverridesClause ov = (OverridesClause)clause; // QualifiedName qn = ov.oldFqn(); // QualifiedName poppedName = qn.popped(); // int size = poppedName.length(); // Element container = innerClass; // if(size > 0) { // SimpleReference<Declaration> ref = new SimpleReference<Declaration>(poppedName, Declaration.class); // ref.setUniParent(relation.componentType()); // container = ref.getElement(); // } // Signature lastSignature = qn.lastSignature(); // SimpleReference<Declaration> ref = new SimpleReference<Declaration>(null, lastSignature.clone(), Declaration.class); // ref.setUniParent(container); // Declaration decl = ref.getElement(); // if(decl instanceof Method) { // Method<?,?,?,?> method = (Method<?, ?, ?, ?>) decl; // Method original = createOriginal(method, original(method.name())); // if(original != null) { // innerClass.add(original); // } // Method outward = createOutward(method,((SimpleNameMethodSignature)ov.newSignature()).name(),relation); // if(outward != null) { // innerClass.add(outward); // } // } // // oldName(....) { // // DirectOuterClass.this.newName(); // // } // } // } // } /** * * @param relation A component relation from either the original class, or one of its nested components. * @param outer The outer class being generated. 
*/ public Type createInnerClassFor(ComponentRelation relation, Type outer) throws ChameleonProgrammerException, LookupException { NamespacePart nsp = relation.furthestAncestor(NamespacePart.class); // Type parentType = relation.nearestAncestor(Type.class); RegularType componentType = (RegularType) relation.componentType(); NamespacePart originalNsp = componentType.furthestAncestor(NamespacePart.class); for(Import imp: originalNsp.imports()) { nsp.addImport(imp.clone()); } Type stub = new RegularType(innerClassName(relation, outer)); TypeReference superReference; if(relation.nearestAncestor(Type.class).signature().equals(outer.signature()) && (outer.nearestAncestor(Type.class) == null)) { superReference = relation.componentTypeReference().clone(); } else { String innerClassName = innerClassName(relation, relation.nearestAncestor(Type.class)); superReference = new JavaTypeReference(innerClassName); } stub.addInheritanceRelation(new SubtypeRelation(superReference)); List<Method> localMethods = componentType.directlyDeclaredMembers(Method.class); for(Method<?,?,?,?> method: localMethods) { if(method.is(method.language(ObjectOrientedLanguage.class).CONSTRUCTOR) == Ternary.TRUE) { NormalMethod clone = (NormalMethod) method.clone(); // substitute parameters before replace the return type, method name, and the body. // the types are not known in the component type, and the super class of the component type // may not have a constructor with the same signature as the current constructor. substituteTypeParameters(method, clone); String name = stub.signature().name(); RegularImplementation impl = (RegularImplementation) clone.implementation(); Block block = new Block(); impl.setBody(block); Invocation inv = new SuperConstructorDelegation(); useParametersInInvocation(clone, inv); block.addStatement(new StatementExpression(inv)); clone.setReturnTypeReference(new JavaTypeReference(name)); ((SimpleNameMethodHeader)clone.header()).setName(name); stub.add(clone); } } return stub; } public final static String SHADOW = "_subobject_"; public Method createOutward(Method<?,?,?,?> method, String newName, ComponentRelation relation) throws LookupException { NormalMethod<?,?,?> result; if((method.is(method.language(ObjectOrientedLanguage.class).DEFINED) == Ternary.TRUE) && (method.is(method.language(ObjectOrientedLanguage.class).OVERRIDABLE) == Ternary.TRUE)) { result = innerMethod(method, method.name()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(result, newName); TypeReference ref = getRelativeClassName(relation); ThisLiteral target = new ThisLiteral(ref); invocation.setTarget(target); substituteTypeParameters(method, result); addImplementation(method, body, invocation); } else { result = null; } return result; } public TypeReference getRelativeClassName(ComponentRelation relation) { return new JavaTypeReference(relation.nearestAncestor(Type.class).signature().name()); } public Method createOriginal(Method<?,?,?,?> method, String original) throws LookupException { NormalMethod<?,?,?> result; if((method.is(method.language(ObjectOrientedLanguage.class).DEFINED) == Ternary.TRUE) && (method.is(method.language(ObjectOrientedLanguage.class).OVERRIDABLE) == Ternary.TRUE)) { result = innerMethod(method, original); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(result, method.name()); invocation.setTarget(new SuperTarget()); addImplementation(method, body, invocation); 
substituteTypeParameters(method, result); // List<CrossReference> crossReferences = // result.descendants(CrossReference.class, // new UnsafePredicate<CrossReference,LookupException>() { // // @Override // public boolean eval(CrossReference object) throws LookupException { // return object.getElement().equals(selectionDeclaration()); // } // // }); } else { result = null; } return result; } private void substituteTypeParameters(Method<?, ?, ?, ?> methodInTypeWhoseParametersMustBeSubstituted, NormalMethod<?, ?, ?> methodWhereActualTypeParametersMustBeFilledIn) throws LookupException { methodWhereActualTypeParametersMustBeFilledIn.setUniParent(methodInTypeWhoseParametersMustBeSubstituted); Type type = methodInTypeWhoseParametersMustBeSubstituted.nearestAncestor(Type.class); substituteTypeParameters(methodWhereActualTypeParametersMustBeFilledIn, type); methodWhereActualTypeParametersMustBeFilledIn.setUniParent(null); } private void addImplementation(Method<?, ?, ?, ?> method, Block body, Invocation invocation) throws LookupException { if(method.returnType().equals(method.language(Java.class).voidType())) { body.addStatement(new StatementExpression(invocation)); } else { body.addStatement(new ReturnStatement(invocation)); } } private NormalMethod<?, ?, ?> innerMethod(Method<?, ?, ?, ?> method, String original) { NormalMethod<?, ?, ?> result; result = new NormalMethod(method.header().clone(), method.getReturnTypeReference().clone()); ((SimpleNameMethodHeader)result.header()).setName(original); ExceptionClause exceptionClause = method.getExceptionClause(); ExceptionClause clone = (exceptionClause != null ? exceptionClause.clone(): null); result.setExceptionClause(clone); result.addModifier(new Public()); return result; } public void substituteTypeParameters(Element<?, ?> result, Type type) throws LookupException { List<TypeParameter> typeParameters = type.parameters(); for(TypeParameter par: typeParameters) { if(par instanceof InstantiatedTypeParameter) { ((InstantiatedTypeParameter)par).substitute(result); } } } public String innerClassName(Type outer, QualifiedName qn) { StringBuffer result = new StringBuffer(); result.append(outer.signature().name()); result.append(SHADOW); List<Signature> sigs = qn.signatures(); int size = sigs.size(); for(int i = 0; i < size; i++) { result.append(((SimpleNameSignature)sigs.get(i)).name()); if(i < size - 1) { result.append(SHADOW); } } return result.toString(); } public String innerClassName(ComponentRelation relation, Type outer) throws LookupException { // return outer.signature().name()+"_"+relation.componentType().baseType().signature().name()+SHADOW+relation.signature().name(); //return outer.signature().name()+SHADOW+relation.signature().name(); return innerClassName(outer, relation.signature()); } public void replaceSuperCalls(final ComponentRelation relation, Type parent) throws LookupException { List<SuperTarget> superTargets = parent.descendants(SuperTarget.class, new UnsafePredicate<SuperTarget,LookupException>() { @Override public boolean eval(SuperTarget superTarget) throws LookupException { return superTarget.getTargetDeclaration().equals(relation); } } ); for(SuperTarget superTarget: superTargets) { Element<?,?> inv = superTarget.parent(); if(inv instanceof RegularMethodInvocation) { RegularMethodInvocation call = (RegularMethodInvocation) inv; Invocation subObjectSelection = new RegularMethodInvocation(getterName(relation), null); call.setTarget(subObjectSelection); call.setName(original(call.name())); } } } public String original(String 
name) { return "original__"+name; } public MemberVariableDeclarator fieldForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { // MemberVariableDeclarator result = new MemberVariableDeclarator(relation.componentTypeReference().clone()); MemberVariableDeclarator result = new MemberVariableDeclarator(innerClassTypeReference(relation, outer)); result.add(new VariableDeclaration(fieldName(relation))); return result; } else { return null; } } private JavaTypeReference innerClassTypeReference(ComponentRelation relation, Type outer) throws LookupException { return new JavaTypeReference(innerClassName(relation, outer)); } public String getterName(ComponentRelation relation) { return relation.signature().name()+COMPONENT; } public final static String COMPONENT = "__component__lkjkberfuncye__"; public Method getterForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { RegularMethod result = new NormalMethod(new SimpleNameMethodHeader(getterName(relation)), innerClassTypeReference(relation, outer)); result.addModifier(new Public()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); body.addStatement(new ReturnStatement(new NamedTargetExpression(fieldName(relation), null))); return result; } else { return null; } } public String setterName(ComponentRelation relation) { return "set"+COMPONENT+"__"+relation.signature().name(); } public Method setterForComponent(ComponentRelation relation, Type outer) throws LookupException { if(! overrides(relation)) { String name = relation.signature().name(); RegularMethod result = new NormalMethod(new SimpleNameMethodHeader(setterName(relation)), new JavaTypeReference("void")); result.header().addParameter(new FormalParameter(name, innerClassTypeReference(relation,outer))); result.addModifier(new Protected()); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); NamedTargetExpression componentFieldRef = new NamedTargetExpression(fieldName(relation), null); body.addStatement(new StatementExpression(new AssignmentExpression(componentFieldRef, new NamedTargetExpression(name, null)))); return result; } else { return null; } } private boolean overrides(ComponentRelation relation) throws LookupException { Type type = relation.nearestAncestor(Type.class); for(Type superType: type.getDirectSuperTypes()) { List<ComponentRelation> superComponents = superType.members(ComponentRelation.class); for(ComponentRelation superComponent: superComponents) { if(new SubobjectJavaOverridesRelation().contains(relation, superComponent)) { return true; } } } return false; } public List<Method> aliasMethods(ComponentRelation relation) throws LookupException { List<Method> result = new ArrayList<Method>(); List<? 
extends Member> members = relation.getIntroducedMembers(); members.remove(relation); for(Member member: members) { result.add(aliasFor(member, relation)); } return result; } public Method aliasFor(Member member, ComponentRelation relation) throws LookupException{ if(member instanceof Method) { Method<?,?,?,?> method = (Method) member; Method<?,?,?,?> origin = (Method) method.origin(); String methodName = fieldName(relation); Method result = new NormalMethod(method.header().clone(), new JavaTypeReference(method.returnType().getFullyQualifiedName())); Block body = new Block(); result.setImplementation(new RegularImplementation(body)); Invocation invocation = invocation(method, origin.name()); invocation.setTarget(new NamedTargetExpression(methodName, null)); if(origin.returnType().equals(origin.language(ObjectOrientedLanguage.class).voidType())) { body.addStatement(new StatementExpression(invocation)); } else { body.addStatement(new ReturnStatement(invocation)); } for(Modifier mod: origin.modifiers()) { result.addModifier(mod.clone()); } return result; } else { throw new ChameleonProgrammerException("Translation of member of type "+member.getClass().getName()+" not supported."); } } private Invocation invocation(Method<?, ?, ?, ?> method, String origin) { Invocation invocation = new RegularMethodInvocation(origin, null); // pass parameters. useParametersInInvocation(method, invocation); return invocation; } private void useParametersInInvocation(Method<?, ?, ?, ?> method, Invocation invocation) { for(FormalParameter param: method.formalParameters()) { invocation.addArgument(new ActualArgument(new NamedTargetExpression(param.signature().name(), null))); } } public String fieldName(ComponentRelation relation) { return "__component_" + relation.signature().name(); } /** * args[0] = path for the directory to write output * args[1] = path to read input files * ...1 or more input paths possible... * args[i] = fqn of package to read, let this start with "@" to read the package recursively *...1 or more packageFqns possible... * args[n] = fqn of package to read, let this start with "#" to NOT read the package recursively. *...1 or more packageFqns possible... * * Example * java Copy outputDir baseInputDir customInputDir1 customInputDir2 @myPackage.subPackage */ public static void main(String[] args) throws Exception { if(args.length < 2) { System.out.println("Usage: java .... JavaTranslator outputDir inputDir* @recursivePackageFQN* #packageFQN* $typeFQN*"); } BasicConfigurator.configure(); Logger.getRootLogger().setLevel(Level.FATAL); Config.setCacheLanguage(true); Config.setCacheElementReferences(true); Config.setCacheElementProperties(true); ProviderProvider provider = new ProviderProvider(new SubobjectJavaModelFactory(),".java",true,true); provider.processArguments(args); long start = System.currentTimeMillis(); Java result = new JavaTranslator((SubobjectJava) provider.language(), provider.namespaceProvider()).translate(); // Output long stop = System.currentTimeMillis(); File outputDir = provider.outputDir(); TypeWriter writer = new TypeWriter(result, new BasicDescendantProvider<Type>(provider.namespaceProvider(), Type.class),outputDir); writer.write(); System.out.println("Translation took "+(stop - start) + " milliseconds."); } }
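For orientation only: a minimal, hand-written sketch of the kind of members the fieldForComponent/getterForComponent/setterForComponent helpers above appear to emit for one component relation. The outer class Radio and the subobject name speaker are invented placeholders, and the stub class merely stands in for the generated inner class (which, per createInnerClassFor, actually subclasses the component type); this is not verbatim translator output.

// Placeholder for the generated subobject class; the real output of
// createInnerClassFor subclasses the relation's component type and
// re-declares its constructors (the name follows SHADOW = "_subobject_").
class Radio_subobject_speaker { }

public class Radio {

    // fieldForComponent: backing field, named via fieldName(relation).
    Radio_subobject_speaker __component_speaker;

    // getterForComponent: public accessor, named via getterName(relation).
    public Radio_subobject_speaker speaker__component__lkjkberfuncye__() {
        return __component_speaker;
    }

    // setterForComponent: protected setter, named via setterName(relation);
    // the rewritten subobject constructor calls are redirected to it.
    protected void set__component__lkjkberfuncye____speaker(Radio_subobject_speaker speaker) {
        __component_speaker = speaker;
    }
}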
more work on constructors
src/subobjectjava/translate/JavaTranslator.java
more work on constructors
Java
mit
b9cdc3770fcafc71f0963092b81df8d39e3ef366
0
Wneh/weather-forecast
package NLG;

import java.util.ArrayList;

import Main.TrendPoint;
import POJO.TimeSerie;
import POJO.WeatherData;

public class Basic {

    private WeatherData wd;
    private ArrayList<TrendPoint> trendsTemp;
    private ArrayList<TrendPoint> trendsWind;

    public Basic(WeatherData wd, ArrayList<TrendPoint> trendsTemp, ArrayList<TrendPoint> windTrend){
        this.wd = wd;
        this.trendsTemp = trendsTemp;
        this.trendsWind = windTrend;
    }

    /** Describes the current temperature and where the first trend peaks or bottoms out. */
    public String generateSentenceTemp(){
        StringBuilder sb = new StringBuilder();
        TrendPoint firstTrend = this.trendsTemp.get(0);
        TimeSerie first = this.wd.getTimeseries().get(0);
        System.out.println("First: " + first);
        //TimeSerie second = this.wd.getTimeseries().get(firstTrend.getIndex());
        //Some basic text
        sb.append("At ").append(first.getClockTime()).append(" o'clock we will have a temperature of ").append(first.getT()).append(" ° C");
        //Add if it will increase or decrease in temperature
        if(firstTrend.getTrend() == TrendPoint.Trend.POSITIVE){
            int maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append(" steadily rising until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime())
                .append(" o'clock where it will reach its maximum temperature, about ")
                .append(wd.getTimeseries().get(maxMinTrendIndex).getT())
                .append(" ° C.");
        } else {
            int maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append(" slowly decreasing until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime())
                .append(" o'clock where it will reach its lowest temperature, about ")
                .append(wd.getTimeseries().get(maxMinTrendIndex).getT())
                .append(" ° C.");
        }
        return sb.toString();
    }

    /** Describes the current wind speed and direction and how the first trend develops. */
    public String generateSentenceWind(){
        StringBuilder sb = new StringBuilder();
        TrendPoint firstTrend = this.trendsWind.get(0);
        TimeSerie first = this.wd.getTimeseries().get(0);
        //sb.append("At ").append(first.getClockTime()).append(" o'clock we will have a temperature of ").append(first.getT()).append(" ° C");
        sb.append("At ").append(first.getClockTime()).append(" o'clock this ").append(first.getDayZone()).append(" we'll see wind speeds of up to ")
            .append(first.getWs()).append(" m/s blowing in ").append(first.getWindDirection()).append(" direction,");
        //Add if it will increase or decrease in temperature
        int maxMinTrendIndex;
        if(firstTrend.getTrend() == TrendPoint.Trend.POSITIVE){
            maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append("the wind speed will then build up until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime()).append(" o'clock");
        } else {
            maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append("the wind speed will then build up until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime()).append(" o'clock");
        }
        sb.append(" with speeds of up to ").append(wd.getTimeseries().get(maxMinTrendIndex).getWs()).append(" m/s blowing in the ").append(wd.getTimeseries().get(maxMinTrendIndex).getWindDirection())
            .append(" direction");
        return sb.toString();
    }
}
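A minimal usage sketch for the Basic generator above, assuming the surrounding application has already produced a WeatherData instance and the two TrendPoint lists; ForecastDemo, describe and the variable names are placeholders, not part of the weather-forecast source.

package NLG;

import java.util.ArrayList;

import Main.TrendPoint;
import POJO.WeatherData;

public class ForecastDemo {

    // Turns precomputed weather data and trend lists into one forecast string
    // by delegating to Basic's two sentence generators.
    public static String describe(WeatherData weatherData,
                                  ArrayList<TrendPoint> temperatureTrends,
                                  ArrayList<TrendPoint> windTrends) {
        Basic nlg = new Basic(weatherData, temperatureTrends, windTrends);
        return nlg.generateSentenceTemp() + " " + nlg.generateSentenceWind();
    }
}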
src/NLG/Basic.java
package NLG;

import java.util.ArrayList;

import Main.TrendPoint;
import POJO.TimeSerie;
import POJO.WeatherData;

public class Basic {

    private WeatherData wd;
    private ArrayList<TrendPoint> trendsTemp;
    private ArrayList<TrendPoint> trendsWind;

    public Basic(WeatherData wd, ArrayList<TrendPoint> trendsTemp, ArrayList<TrendPoint> windTrend){
        this.wd = wd;
        this.trendsTemp = trendsTemp;
        this.trendsWind = windTrend;
    }

    public String generateSentenceTemp(){
        StringBuilder sb = new StringBuilder();
        TrendPoint firstTrend = this.trendsTemp.get(0);
        TimeSerie first = this.wd.getTimeseries().get(0);
        System.out.println("First: " + first);
        //TimeSerie second = this.wd.getTimeseries().get(firstTrend.getIndex());
        //Some basic text
        sb.append("At ").append(first.getClockTime()).append(" o'clock we will have a temperature of ").append(first.getT()).append(" ° C");
        //Add if it will increase or decrease in temperature
        if(firstTrend.getTrend() == TrendPoint.Trend.POSITIVE){
            int maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append(" steady rising until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime())
                .append(" o'clock where it will reach it's maximum temperature, about ")
                .append(wd.getTimeseries().get(maxMinTrendIndex).getT())
                .append(" ° C.");
        } else {
            int maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append(" slowly decreasing until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime())
                .append(" o'clock where it will reach it's lowest temperature, about ")
                .append(wd.getTimeseries().get(maxMinTrendIndex).getT())
                .append(" ° C.");
        }
        return sb.toString();
    }

    public String generateSentenceWind(){
        StringBuilder sb = new StringBuilder();
        TrendPoint firstTrend = this.trendsWind.get(0);
        TimeSerie first = this.wd.getTimeseries().get(0);
        //sb.append("At ").append(first.getClockTime()).append(" o'clock we will have a temperature of ").append(first.getT()).append(" ° C");
        sb.append("At ").append(first.getClockTime()).append(" o'clock this ").append(first.getDayZone()).append(" we will se wind speeds of up to ")
            .append(first.getWs()).append(" m/s blowing in ").append(first.getWindDirection()).append(" direction,");
        //Add if it will increase or decrease in temperature
        int maxMinTrendIndex;
        if(firstTrend.getTrend() == TrendPoint.Trend.POSITIVE){
            maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append("the wind speed will then build up until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime()).append(" o'clock");
        } else {
            maxMinTrendIndex = wd.getMaxMinTempTrendIndex(true,0, firstTrend.getIndex());
            sb.append("the wind speed will then build up until about ").append(wd.getTimeseries().get(maxMinTrendIndex).getClockTime()).append(" o'clock");
        }
        sb.append(" with speeds of up to ").append(wd.getTimeseries().get(maxMinTrendIndex).getWs()).append(" m/s blowing in the ").append(wd.getTimeseries().get(maxMinTrendIndex).getWindDirection())
            .append(" direction");
        return sb.toString();
    }
}
More typos
src/NLG/Basic.java
More typos
Java
mit
0cd55ba333f9fd98e9f460dcb9713591b42dbac9
0
vagnerereno/RPII
package atos; import ItemJogo.Comida; import audio.Som; import static audio.Som.parar; import java.util.Scanner; import sun.audio.AudioStream; import ItemJogo.Inventario; import ItemJogo.Item; import ItemJogo.ItemAtaque; import ItemJogo.ItemDeCombate; import ItemJogo.Itens; import inimigos.AnaoIA; import inimigos.Inimigo; import java.util.ArrayList; import java.util.Scanner; import motor.EnumEspecialidades; import motor.EnumRacas; import raca.Anao; import raca.Elfo; import raca.Humano; import rpii.Arqueiro; import rpii.Especialidade; import rpii.Guerreiro; import rpii.Raca; public class Ato1 { private Raca jogador; private Raca jogadorTeste; private Inventario dispensa; private String nome; public static void main(String[] args) { Ato0 a0 = new Ato0(); Scanner input = new Scanner(System.in); System.out.println("Olá jogador, qual o seu nome?"); String nome = input.next(); System.out.println(nome + " Qual especialidade você quer ter?"); System.out.println(" 1 - Arqueiro - "); System.out.println(" 2 - Guerreiro - "); System.out.println(" 3 - Mago - "); int especialidade; especialidade = input.nextInt(); System.out.println("Então " + nome + " qual raça você gostaria de ser?"); System.out.println("1 - Anão - "); System.out.println("2 - Elfo - "); System.out.println("3 - Humano - "); System.out.println("4 - Orc - "); System.out.println("5 - Undead - "); int raca; raca = input.nextInt(); Ato1 as = new Ato1(a0.criarJogador(nome, EnumEspecialidades.values()[especialidade], EnumRacas.values()[raca])); as.abrirInventario(as.getDispensa(), "dispensa"); } public Ato1(Raca jogador) { this.jogador = jogador; inicializarJogador(); // Ato0 atinho = new Ato0(); // this.jogador = atinho.criarJogador(); // abrirInventario(getDispensa(), "dispensa"); } public long Enredo_1() throws Exception { Som.fase1(); System.out.println(""); System.out.println(getJogador().getNome() + ", vive numa pequena vila hermitao, se exilou, passado obscuro voltando de uma cacada, avista fumaca vindo da vila proxima \n" + "onde estao todos os seus amigos, encontra um sobrevivente nos escombros, que lhe conta quem destruiu, vagamente, e morre() \n" + "foi a caravana... disse o sobrevivente usando toda sua energia que o mantinha vivo " + getJogador().getNome() + " se lembra que eles estão buscando artefatos\n" + "para reviver inimigo pica que tem poderes de mandar em criaturas e esse inimigo so pode ser derrotado com armas imbuídas com uma \n" + "magia perdida dos antigos contos de taodistante agora, precisa achar pedacos de adamantium pra montar ritual de criacao dessas \n" + "armas sagradas! furioso, " + getJogador().getNome() + " vai para sua casa se preparar para a sua jornada em busca de adamantium para conseguir sua vinganca\n" + " Mas tome CUIDADO! pois o caminho possui altos exercitos e alguns lideres q comandam a hierarquia da caravana"); fase_1(); return 0; } public long fase_1() throws Exception { Scanner e = new Scanner(System.in); System.out.println("." 
+ ".#####...######..##...##..........##..##..######..##..##..#####....####..\n" + ".##..##..##......###.###..........##..##....##....###.##..##..##..##..##.\n" + ".#####...####....##.#.##..........##..##....##....##.###..##..##..##..##.\n" + ".##..##..##......##...##...........####.....##....##..##..##..##..##..##.\n" + ".#####...######..##...##............##....######..##..##..#####....####..\n" + "........................................................................."); System.out.println("Pois bem, então " + getJogador().getNome() + " parte em sua longa jornada.\n" + "Nos arredores da vila Kenko, você descança, sem abrir os olhos voce pensa: acordo ou durmo mais um pouco?\n" + "1-SEGUIR DORMINDO 2-ACORDAR"); int resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.galinha(); System.out.println("As galinhas cacarejam enlouquecidamente anunciando o novo dia, por vezes você sente vontade de dar cabo delas \n" + "devido a irritação sonora recorrente. Agora desperta por completo."); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SEGUIR DORMINDO 2-ACORDAR"); resp = e.nextInt(); } System.out.println("Abrindo os olhos, ainda meio confuso, percebe que esqueceu de apagar a vela, e agora o cheiro de cera invade o recinto.\n" + "░▒░░░░░░░░░░░░░▒░░░░░░░▒░░░░░░░░░░░░░░\n" + "░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒░▒░▒░▒░▒░▒░▒░▒░▒░▒░░░▒░░░░░░░░\n" + "░▒░░▒░▒░▒░▒░▒░▒░▒▒▒▒▒░▒░▒▒▒░▒░▒░░░░░░░\n" + "░▒░▒▒▒░▒░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒▒▒░▒░░░▒\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒░▒░░░░▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░░░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓███▓▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░▓████░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒░█████░▒▒▒▒▒▒▒░▒░▒▒▒░▒\n" + "░▒▒░░░▒░▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒░▒░▒▒▒░░██▓▓▓░░▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒░▒▒▒▒▒▒▒▒░░██▓▓▓░▒▒▒▒▒▒▒▒▒▒▒░░░▒\n" + "░▒░░▒░▒░▒░▒░▒▒▒░░██▓▓▒░▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░░░▒░▒▒▒░▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒░░░░▒░▒░▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒░▒▒▒▒▒░▒▒\n" + "░▒░▒░▒▒▒░▒▒▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒░▒░▒░▒▒▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░░░░░░░▒░░▒░░██▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░░░▒▒▒▒▒▒▒▒▒▒▒██▓▓▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒\n" + " ░░░░▒▒▒▒░░▒▒▒▒▒░░░░░░░░░░░░ ░░░░░\n" + "░░░░░░▒▒▒░ ░▒▒▒▒░░░░\n" + "▓█████▓▓▒▒▒ ▒▒▒▒▓▓████\n" + "██████████████████████████████████████" + "\n1-APAGAR VELA 2-IR AO BANHEIRO"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░▓████░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒░█████░▒▒▒▒▒▒▒░▒░▒▒▒░▒\n" + "░▒▒░░░▒░▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒░▒░▒▒▒░░██▓▓▓░░▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒░▒▒▒▒▒▒▒▒░░██▓▓▓░▒▒▒▒▒▒▒▒▒▒▒░░░▒\n" + "░▒░░▒░▒░▒░▒░▒▒▒░░██▓▓▒░▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░░░▒░▒▒▒░▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒░░░░▒░▒░▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒░▒▒▒▒▒░▒▒\n" + "░▒░▒░▒▒▒░▒▒▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒░▒░▒░▒▒▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░░░░░░░▒░░▒░░██▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░░░▒▒▒▒▒▒▒▒▒▒▒██▓▓▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒\n" + " ░░░░▒▒▒▒░░▒▒▒▒▒░░░░░░░░░░░░ ░░░░░\n" + "░░░░░░▒▒▒░ ░▒▒▒▒░░░░\n" + "▓█████▓▓▒▒▒ ▒▒▒▒▓▓████\n" + 
"██████████████████████████████████████" + "\nVela apagada!\n "); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-APAGAR VELA 2-IR AO BANHEIRO"); resp = e.nextInt(); } System.out.println(getJogador().getNome() + " vai até o lavabo atirar agua em sua face para dissipar a preguiça ainda remanescente, a agua está\n" + "um arrepio percorre todo o seu corpo, você encara-se no velho espelho e contempla sua triste feição\n" + "trazendo flashes das lembranças de seu passado de mercenário, tempos de matança em prol do atual reino.\n" + "Voltando a si, ainda contemplando sua face no reflexo:\n"); /* + "Qual será sua classe?\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp == 3 || resp == 4 || resp == 5) { if (resp == 1) { break; } if (resp == 2) { break; } if (resp == 3) { break; } if (resp == 4) { break; } if (resp == 5) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); } System.out.println("1-ESCOLHER NOVAMENTE 2-IR PARA O QUARTO"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Qual será sua classe?\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp == 3 || resp == 4 || resp == 5) { if (resp == 1) { break; } if (resp == 2) { break; } if (resp == 3) { break; } if (resp == 4) { break; } if (resp == 5) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); } break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-ESCOLHER NOVAMENTE 2-IR PARA O QUARTO"); resp = e.nextInt(); }*/ System.out.println("Então você dirige-se para o quarto para equipar-se\n" + "Hora de comer alguma coisa\n" + "1-IR PARA A DISPENSA 2-CAÇAR SUA COMIDA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { abrirInventario(getDispensa(), "dispensa"); System.out.println(getJogador() + " percebendo a pouca quantia de alimento em sua dispensa decide ir caçar na floresta"); System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA A DISPENSA 2-CAÇAR SUA COMIDA"); resp = e.nextInt(); } System.out.println("Olhando para perto da porta da frente, encontra sua arma\n" + "1-SAIR DA CASA 2-PROCURAR POR COMIDA DENTRO DA CASA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("\n" + " ░▒▒▓▓▓▓▓▓▓▒▒░ \n" + " ▒▓▓███████▓▓▓▓▓▓▓███████▓▓▒░ \n" + " ▒▒▓▓███▓▓▒▒▒▒▒▒▒▒▓▓▓▓▒▒▓▒▓▓▓▓██████▓▒ \n" + " ▒▓██▓▒░▒░▓▓██████████████████████▓▓▓▒▒▒███▒ \n" + " ▒███▓░░▓███▓█████████████████████████████▓▓▓▓██▓▒ \n" + " ▒███░░▓██▓███▓▓█████▓█████████████████████████▓▓▓███▓ \n" + " ███▒░▒▓███▓███▓▓█████▓███████████████████████████▓▒▒███░ \n" + " ███░░██▒███▓▓███▓█████▓█████████████████████████████▒░███▒ \n" + " ███▒▒█▒██▒███▓███▒█████▓█████████████████████████████▓▓▒███░ \n" + " ▓███▒█▓▓▓█▓▓██▓▓██▓█████▓▓█████████████████████████▓█▓██░████ \n" + " ▒▓▒▓▒ ░▓▓▓▓▓█▒██▓▓██▓▓▓▓▓▓▓▓█████████████████████████▓█▓█▓░▒▒▓▓ \n" + " ▒▓▓ ███░▓▓▓▓▓██▒██▓▓▓▓▓█████▓████████████████████████▓▓▓█▓▒ ▒██▓ ▒ \n" + " ░█▓▓ ▓██▒ ▓▓▓▓▒█▓▓▓▓▓██▓██████████████████████████████▓█▓▓▒ ░███░▒▓▒ \n" + " ▒▓▓█▒ ███░ ▒▓▓▓▒▓▒██▓███▓███████████████████████████▓▓█▓▓▓░ ███▒░▓▒▓░ \n" + " ▒█▓▓█▒ ▓██▒ ░▒▓▒▒█▓██▓███▓▓░ 
░▒░▓▓▓▓████████████████▓▓█▓░ ███░▒▓▓▓▓▒ \n" + " ▒▓▓▓█▓█▓ ▒███▒ ░▒▓▓█▓██▓▒ ▒▒▒░ ░ ░ ░▒▒▓▓██████████▓▒▒▓██▓░▓▓█▓▓█▓ \n" + " ░ █▒▓█▓▓███▒░▒███▒░▒▒▒▓▓▓█▓▓▓███████████████▓▓████▓██▓▓▓▒▒▓██▒░▒▒██▒█▓▓▒░ ░ \n" + " ▒░▒▓█▓▒▓██████▒░▒▓███▓▒░░███████████████████████████▓▓▓▓█▓▓▓░▒▓██▓▓██▓█▒▓▓ ▒ \n" + " ▓░▓▓█▓▓▓███▓▓████▓▓▓▓▓▓▓▓▒▒▒▒▒ ░▒▒░░▒▒▒▓▓▓▓▓▓▓▓▓▓█▓▓▒▓▓▒▒▒▒▓▓▓████▓▓██░█▓█▒▒▒ \n" + " ░▓░▓█▓▓█▓▒▓▓▓████████▓▓▓▒▒▒▒▒▒▒░░▒░░░░ ░░░░ ░░▒▒▓████▒▓████▓▒▓▓▓█▓▓░▓ \n" + " ▓▒▒▓█▓████▒▒▓██████████████▓▓▒▒░░░ ░░▒▒▓▓███████████▒███▓▒░██▓█▓▒█▓░\n" + " █▓▓ ▓████▓██▓▒▓▓█▓█████████████████████████████████████▓███████▓▓▓▒▓█▓▓█▓▒█░█▒▓\n" + " █▓▓▒ ███▓▓████▓▓▒▒▓▓███████████████████████████████████▓▓████▓▓▒▒▓████▒▓██░▓▓▓▒\n" + " ░██▓▒ ▓█▓████████▓▓▓▒▓▓▓██████████████████▓█████████████▓▓▓▓▒▓▓█▓▓████▓▓█░▓▓░█░\n" + " ██▓▓▓▒░█████████████▓▓▓▓▒▓▓▓▓▓▓██████████▓██████▓▓▓▓▓▓▓▒▓▓██████▒█████▒░▓▒▓█▓ \n" + " ▓██▓▓▓▒▓████▓███████████▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓█▓▓███████▓▓██▓░▒▓▓▓▓█ \n" + " ▓██▓▓▓▒▒▓▓▓▓██████████████████████▓▓▓▓▓▓▓█████████████▓████████▓▓▒▒▒▓▒███ \n" + " ░▓██▓▓▓▓▓▓▓███████████████████████████▓██████████████▓██████▓▒▒▒▒▓▓███▓ \n" + " ▒████▓▓▓▓▓▓▓▓▓██████████████████████▓██████████████▓▓█▓▒▒▒▒▒▓▓████▓ \n" + " ░▓████▓▓▓▓▓▓▓▓▓▓▓█████████████████▓███████████▓█▓▓▒▒▒▒▒▓▓█████▒ \n" + " ░▓██████▓▓▒▓▒▒▒▓▓▓▓▓▓██████████▓███▓▓▓▒▒▒▓▓▒▒▒▒▓▓▓██████▒ \n" + " ▒▓███████▓▓▓▒▒░░▒▒░░░░░░▒▒▒░░░▒▒▒▒░▒▒▒▒▓▓███████▓▒ \n" + " ░▒▓█████████▓▓▓▓▒▒▒▒▒▒▒▓▒▓▓▓▓██████████▒▒░ \n" + " ░▒▒▓▓▓████████████████▓▓▓▒▒░ "); System.out.println("Voce nao encontra nada, sua barriga esta roncando e sua saude diminuindo, entao sai da casa."); break; } if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SAIR DA CASA 2-PROCURAR POR COMIDA DENTRO DA CASA"); resp = e.nextInt(); } Som.porta(); System.out.println("\n" + " \n" + " ░ \n" + " ░ ░▒ \n" + " ░░ ▒░ \n" + " ▒ ░ \n" + " ▓ ▒▒ \n" + " ▒█▓▓█▓ \n" + " ░▒░░░▓█ ░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒░░█▓ ░ \n" + " ░▒▓▓▓▓▓▓▓▒█▓ ▒ \n" + " ▒▓████████████▓░ ▒█▓░ \n" + " ░███▓▓▓▓███████████▓▒░░ ▒▒▓▓ \n" + " ▓█▓▓▓████▓█▓███▓███████████▓░ ▒▒▓▒ \n" + " ░█▓ ▒▓███▓▓▓█▓▓▓█▓███████████ ▒▒▓▒ \n" + " ░█▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███████▓ ░▒▓█▓███▓ \n" + " ░█▒ ▒▓▓▓▒▒░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓█▓████▓ \n" + " ░█░ ░▒▓█▓▓█▒ ▒█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████░ \n" + " ░█▒ ░░ ▒████▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ \n" + " █▒ ░░░▒▓▓██▒ ░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓ \n" + " ▓▓ ░░▒░░▒▓▓▓▓░░░░ ▒▓▓▓▓▓▓▓▓▓▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████ \n" + " ▓▓ ░▒▒▒░░▒░░░░░▒░░ ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓██ \n" + " ▓█ ▒▒▒▒░░░░░░░░▒▒▒▒░ ▓▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ██ \n" + " ▒█░ ░▒▒▒▒░ ░▒░░░ ▒▒▒▒░ ░▓▓▓▓▓▓▓▓▓▓▓███████▓▓▓▓▓▓▓▓▓▓▓██░ ░ ██ \n" + " ▓▒ ░░▒░░░▒▒▒▒▒▒▒▒▓░░░░░░ ░▓▓▓▓▓▓▓█████▓░░▒▓███▓▓▓▓▓████▓▒▒▒▒ █▓ \n" + " ░ ░░▒▒▒░░█▓▓██████▒ ▒▒░░░▒░▒ ▒███░░░ ░▒▒ ▒▓░████▓▒▒▓▓▒███▒▒░▒█ \n" + " ▒▓░░▒▒░▓▓▓▓▓▓▓▓█▒░▒░░▒░▓░ ░██▓ ░ ░██░ ░ ████ ░▓▒▓█▓▓▓▓ \n" + " ▒▓▒░░▒░▓▒▓▓▓▓███▒░░▒▓▒▒▓░ ░▓▓▓░░░░░██▒░ ░░██▓▓░░░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒▒░░░█▓▒▓▓████▒ ▒▒▒▒░▓░ ░▓▓▓░░▒▒░▓█░░░░░▒██▓▓░▒░▒▓▓▒▓▓▒▓▓ \n" + " ░▒▒▒▒░▒▒▓▒▓▓▓▓▓▒░░▒░▒▒▒▓▓▓█▓▓▒▒░░▒▒░░░░░░░░░░░▒▓▒▒░░▓▒▓▓▓▓▓█▒ \n" + " ░▓▒▓▓▒▓▓▒▓█▓▒▓▓▓▒░ ▒▒▒▓▒▓▓▓▓██▓▒ ▒▒▒░░░▒ ░░░▒▓▓▓▓███▓▓███████░ \n" + " ░▒▓▓▒▓▓▒▒▒░▒▒░░░▒▒▓▓▒▒▓▒▒▒▒░░▒▒▓▒░░▒▒░░░░░░▒█▓▒▒▒▒▒▒▓▒▓█▓▒▓▓▓████▓░ \n" + " ▓▓▓▒░▒▒░░▒▒░░░░ ░▒░▒▓▒▒▒▒▓▒▒ ░░▒▓▓▓▒▒▒░░░░░▓▓▓▒▒▓▒▒▒░▒▒▒▒░░ ▒░▒▓██▓ \n" + " ░▒▒░░▒▓▓▒▒▒▒▒░ ░░░▒▒░ ░▒▓▒░░░ ░▒▒▒▒░▒▓▓▒▒▒▒▒▓▒ ░░ ░ \n" + " ░ ░ " + "\nAo sair, vislumbra seu pequeno casebre de madeira cedido pelo ancião da vila após voce salva-lo em certa ocasião\n" + "este imóvel é sua morada a alguns 
anos, sempre solitariamente acolhendo seus pensamentos e seu corpo cansado.\n" + "Virando-se para a densa floresta de Lavitan\n" + "1-SENTAR UM POUCO 2-ENTRAR NA FLORESTA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println(getJogador().getNome() + " recupera um pouco de sua saude durante seu descanso, depois disso adentra a floresta de Levitan."); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SENTAR UM POUCO 2-ENTRAR NA FLORESTA"); resp = e.nextInt(); } Som.entradaFloresta(); System.out.println("\n" + "█████████████████████████████████▓ █████████████████████████████████████████████\n" + "████████████████████████████████▓▓▓▓████████████████████████████████████████████\n" + "███████████████████████████████▒▒▒▒▒▒▒███████████████████████████▓██████████████\n" + "██████████████▒░█████████████▓▒▒▒▒▒▒▒▒▒▓████████████████████████▒▒▒█████████████\n" + "████████████▓▒▒▒▒▓██████████▒▒▒▒▒▒▒▒▒▒▒▒▒▓███████▓█████████████▓▓▓▒▒▓███████████\n" + "███████████▒▒▒▒▒▒▓▓███████▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒█████▓▒▒▓██████████▒▓▓▒▒▓▒▓██████████\n" + "██████████▒▒▒▒▒▒▒▒▓▒▓███▒░░░░▒░░░▒▒▒▒▒░▒░▒░ ░██▓▒▓▒▒▒████████▒▓▒▒▒▒▒▒▒▒█████████\n" + "████████▓▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▓▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒█▓▓▓▒▒▒▒▒▒▒▓████▓▒▒▒▒▒▒▒▒▒▒▒▒▒███████\n" + "██████▓▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▓█▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓█ ░▒░▒▒░▒▒▒░▒▒░ ░▓████\n" + "█████░░░▒▒▒▒▒░▒▒▒░▒▒▒░░░▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒████\n" + "█████▓▓▓▒▒▒▒▒▒▒▒▒▒▒▓▒ ░▒▓▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░ ▒░▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓██████\n" + "████▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒░▒▒▒▒▒░▒▒░▒▓▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░████\n" + "██░ ░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓ ░▓▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓▓▒▒▒▓▒▒▒░░▒▒░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒███\n" + "██▓███▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒░ ▒▒▒▒▒░▒▒▒▒▒░░▒▒▒▒░ ▒▒▒▒░▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓████\n" + "████▓░▒░▒░░▒▒▒▒▒▒▒░░▒░░░░▓▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒ ░░░░▒ ░▒▓▒░▓▒░▒░░░▒▒▒▒▒▒▒░▒░▒▒▒ ▒▓█\n" + "███▓░▒░░▒▒▒░░▒▒▒░░▒▒▓▓░▒▓▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▓▒ ▒▒▒▒▓▒▒░ ░ ▒▒▒▒▒▒░▒▒▒░░▒▒▒▒ ░▒▒░ ▒\n" + "█████▓▓▒▒▒▒▒▒▒░▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒░░▒▒▒▒░▒▒▒▓▓▒▒▒▒▒▒▒░░▒▒▒▒▒▒▒░▓████\n" + "███▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒░░▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░███\n" + "██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒░░ ▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓ ░▒▒ ▒▒▒▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░ ░█\n" + "▓ ░░▒░▓▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒▒▒▓░ ▒▒░ ▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓░░█▓▒░▒\n" + "▓▓███▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓▒▓░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓ ░▒▒▓▒▒▒▓▒░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ █████\n" + "████▒▓▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒░▒▓▒▒▒▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ▒▒▒▓▓▒▒ ▓▓▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒ ▓███\n" + "███▓▓▓▒▒░░░░░░░▒▒▒▒▒░░█▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▓░ ░░░▒░ ▒▓░▒▒▒▒░ ░░░░░░▒▒▒▒▒▒▒ ▓██\n" + "██▓░▒░░ ░░▒ ░ ▓▒░░░░░░▒ ░░░ ░ ▒░░░░░▒▓ ░▓░░░░░▒░ ░ ░ ░░░░░░▒▒ ██\n" + "██░░ ░▒▓▓▒▒ ░▒▓▒░ ▒▓ ░ █▓░ ▓▓▓▒░ ░░▒░░▒ ▒ ▒▒░ ░░░▒▒▒▒░ ▒▒▓▓░ ▓▒▒░░▒▒░ ▓█\n" + "██▒▓█████▓ ▒▓▓▓▓ ███████████ ░▓▓▓▓▓ ████████▒ ▒▓▓▒ ███████▒ ░▒▓▓▓░▓████████▓██\n" + "██████▓▓███ ░▒▓▓▓ ████████████▒ ▒▒▒▓▒ ████▓████ ▓▓▓▒▒█████████ ▒▓▓▓░░████▓██████\n" + "▓████▓▒▒▓██ ▒▒▒▒▒ ▓▒▓████▓▓░██▓ ▓▒▒▒▒░▓█▓▒▒▓███ ▓▒▒░▒██▓ ██▓██ ░▒▒▓░▓▒▓██ ▒█████\n" + "▒ ▒█▓▒▒░▒▒▒▒▒▓▒▒▒▒▒ ▒▓▓▓▒░▓▒▓▓▒▒▒▓▒▒▓▓▒▒▒▒▒▒▓█ ▒░▒▒▒▓▒▒ ▓▒ ░▓ ▒▒▒▒▒▓▒▒▓▒▒▒▒▓▓▒▒\n" + "██░░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░ ░▒░░░▒▒░░▒░▒░▒▓▒░▒▒▒░░▒▒▒░ ▒▒▓▒▒▒▒░▒▒▒▒▒▒▓▒ ▒▒▓▒░░░▒▒░▒▒ ▒█"); System.out.println("\nvoce caminha calmamente por uma pequena trilha, ela mal pode ser vista pois a vegetação muito densa\n" + "mas sua notável experiência ela se torna apenas uma trilha normal, seguindo mais adiante voce depara-se com\n" + "uma bifurcação.\n" + "Ao norte leva para uma grande elevação, a leste, a segunda trilha entra em um terreno mais baixo e umido.\n\n" + " \n" + " 
▒▒░ \n" + " ░▒▒▒ \n" + " ░▒▓▒ ▒▒▒░░░ \n" + " ░▒▓▒ ░▒▒▒▓▓▓▒▓▒▓▓▓░\n" + " ░▒▒▒ ░▒▒▒░▓▒▒▒ ▒▒▓▒ \n" + " ░░▒▒▒░░ ░▒▓▒ ░▒▒▒▒ ░░ ░▒▒▓▓▒ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░ ▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒░ \n" + " ▒▒░░░ ░░░▒▒▒▒▒▒▒░░░ ▒░▒▒▒░░░░░░▒▒▒▒░ \n" + " ░▒░ ░▒░▒▒░▒▒░░░░░▒▒▒ ░▒▒▒▒▒▒▒▒░░░░ \n" + " ░░░▒▒▒░░░░░░░▒▒▒▒▒▒░░▒▒▒ ▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░░▒▒▒▒▒▒▒▒▒▒ ░▒▒░ ▒░░░ ▒▒▒▒▒▒▒▒▒░ \n" + " ░▒▒▒▒▒▒░░▒▒▒▒▒░░░▒▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒░ ░ ░ ▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒ \n" + " ▒▒▒▒░▒░ ▒▒▒▒▒░░ ▒▒▓░ ░░▒▒▒▒▒▒▒▒▒░ \n" + " ░▒▒▒▒▒▒▒▒▒▒░ ░▒▒▓▒ ░░▒ \n" + " ░░░ ░▒▒▓▒ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ░▒▒▓ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▓ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ░▒▒▒▓▒ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒ ▒▒▓ \n" + " ░█▒█▒ ░▒▒ \n" + " ▓▓██▓ ▒▒▒ ▒▒▒░ \n" + " ▒▓▓▓█▒ ▒░░▓▓█▓▒ \n" + " ░▒▒▓▓▓ ▒▒████████ \n" + " ▓▓▓▓██ ░▒▒▓███████ \n" + " ▒█▓▓▒ ▒▓▒ "); System.out.println("1-IR PARA O LESTE 2-IR PARA O NORTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { irLeste(); break; } if (resp == 2) { irNorte(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA O LESTE 2-IR PARA O NORTE"); resp = e.nextInt(); } System.out.println(""); System.out.println("PARABENS! voce acaba de concluir a primeira fase do Ato1.\n" + "1-INICIAR FASE 2"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); fase2(); break; } if (resp == 2) { System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); System.out.println("MOSTRAR STATUS"); break; } System.out.println("Numero invalido, tente novamente\n" + "1-INICIAR ATO 2 2-VERIFICAR STATUS"); resp = e.nextInt(); } System.gc(); return 0; } public long irLeste() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("Seguindo adiante neste caminho a vegetação diminui de tamanho, mas nao perdendo em densidade por parte das pequenas vegetações\n" + "voce sente em seus passos que o solo esta ficando cada vez mais umido e mole por certas vezes seus pes afundam na lama\n" + "caracteristica de terrenos alagados.\n" + "" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░░ ░░ ░▒▒▒▒ ░░░░░ \n" + " ░░░░░░▒░▒▒░▒▒▒▒▒▒▒▒░░░░░░ \n" + " ░▒▒░░▒▒▒▒░░▒▒▒▒░▒░▒░▒▒░░░░░ \n" + " ▒▒░ ░▓▒░▒▒▒░▒▒░▒▒▒▒▒▒░░▒▒▒░▒▒░ \n" + " ░▒▒▒▒▒░▒▒▒░ ▒▒░▒░▒▒▒░░░░░▒▒▒▒▒▒▒░░░▒▒▓▒ \n" + " ░▒▒▒▓▒▓▒▒░ ▒▒▒▒░▒▒▒▒░░░░░▒▒▒▒▒▒░░░░░▒▒▒▒▒ ░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒░▒▒▒▓▒▒░▒▒░▒▒▒░▓▓░▒░░░░░▒▒▒▒▒▒▒░▒░ \n" + " ▒▓▒▓▒░▒▒▒░░▒▒▒▒░░▒░░▒▒░░▒▒▒▒▒▒░▒░▒▒▓▒▒▒░░▒▒▓▒▓▓▒▒▒▒▒▒▒░░ \n" + " ░▒▒░▒▒░▒▓▓░▒▒░▒▓▒▒░▒▒░▒▒░▒▒▒░░▒▒░▒▒▒▒▓▓▒▒▒░▒▒▒▓▒▒▓▒▒▓▒▒▒▒▒▒ \n" + " ▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▓▒▒░░░▒▓▒▒▒▒░░▒░░▒▒▒▒▒░░▒░▒▒▒▒▒▒▒▓▓▓▒▒▓▓▒▒▒▒▓▓░ \n" + " ░▒▒▓▓▓▓▓▒░▒▒▒▒░▒▒▒▒▒░░░▒▒▓▒░▒░░▒▒░▒▒▒░▒▒▓▒▒▒▒▒▒▒▒ ▓▓▓▒░▒▓▒▒▒▒▓▓▒ \n" + " ▒▒▒▒▒ ░░░░ ░▒▒▒▒▒▒▓▓▒▒▓▓░▒▒░▒▒▒▒░▒▒░ ▒▒░▓▓▓▒▒▒▒░▒▓▒▒▒▒▒▒░░▒▒▓▓ \n" + " ░▒▒▒░░░▒░░▒▒▒▒▒▓▒▒▓▓▒▒▓▓▓█▓▓▓▓▓▒▒▒░░▒░░░▒▒░ ▒▒░▒▓▒░░▓▒▒░▒▒▒░▒▓▒░▒▒▒▓▓ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓▓█▓▓▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒░░▒░▒░▒▒░░▒▒▒▒▓░▒░▒░▒░░▒▒▒░░▒▓▓▓▒▒ \n" + " ░░▒▒▒▒░▒▓▓▒░▒▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒░▒▒░▒▒ ░░▒▒░░▒▒░ ▒▓▓▓▓▒ \n" + " ░░ ░░░▒░ ▒▓░▒▓▓▓██▓█▓█▓▓▓▒▒▓▓▓▓▓▒▒▒░▒▒▒░▒▒▒▒▒▒▒▒▒░░░░░░ ░▒▒░░▒░░░░░▒░░▒▒▒▒▒▒▒▓▒░ \n" + " ▒▓▓▒░▒▒░ ░█▒░▓█▒▓██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒░░▒▒▒▒░░░░░░░ ░░▒░▒▒░░░ ░░░░░▒▒▒▒ \n" + " ░▒▒░░░░ ██░ ░░ ▒░ ▒░░▒▒▒▒░▒▒▒▒░▒▒▒░▒░▒▒░░░░░▒▒░▒▒▒░░░░░░▒░░░ ░ ░░░░░░▒▒ \n" + " ░▒░ ▒▓░██▓░▒ ░░ ░░░▒░░░░░░░░ ░░░░░░░░░░▒▒▒▒░ ░░ ░░░ ░ ▒░░░▒▒ \n" + " ▓▓ ▒▓▒▒░░░░ ░░░░▒░▒░▒▒▒▒░ ░░ ░▒▒░▒░ ░░░░░ \n" + " ▓▒ ░░░░░░░░▒░▒░░ ▒▒▒░ ░░▒░ 
\n" + " ░░░░░░▒▒░ ▒░░ ░▒░ \n" + " ░ ░░░░▒▒░ ░▒░ ░▒░ \n" + " ░▒░░ ░░▒▒▒ ░░░ ░▒░ \n" + " ░░░ ░░░▒░ ░▒ ░▒░ \n" + " ░░ ░▒▒░░ ░░ ░░ \n" + " ░░ ░▒░ ░░ ░░ \n" + " ░░ ░▒░ ░░ \n" + " ░░░ ░░ \n" + " ░░░ ░ \n" + " ░░ \n" + " ░░░ " + "Ao longe ouve o som da vegetação sendo pisada ou mexida observando calmamente voce avista um imponente javali\n" + "os pelos escuros dão um tom sombrio a fera, contrastando com suas presas brancas como o leite, suas patas e focinho estão cobertas de lama.\n" + "com seu equipamento em maos, e seu instinto de caça agucado, prepara-se para o ataque no desavisado animal.\n" + "aproximando-se sorrateiramente por trás da criatura, segurando firmemente sua arma e tentando fazer o minimo de barulho possivel."); System.out.println("1-ATACAR FEROSMENTE 2-ATACAR SILECIOSAMENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.javali(); System.out.println("A criatura percebe seu movimento e começa a fugir, com muita dificuldade voce consegue alcansa-la"); break; } if (resp == 2) { Som.javali(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR FEROSMENTE 2-ATACAR SILECIOSAMENTE"); resp = e.nextInt(); } Som.cervo(); System.out.println("com um rápido e súbito movimento você lanca-se sobre a fera que num grunhido de dor e aflição cai por terra\n" + "ao lado, voce solta sua bolsa e empunha sua pequena adaga para dilacerar a fera e coletar sua carne e seu couro"); System.out.println("1-ESFOLAR O ANIMAL"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-ESFOLAR O ANIMAL"); resp = e.nextInt(); } System.out.println("você começa a descarnar o animal, coletando o conteudo da caçada."); Som.esfolar(); AnaoIA javaliLeste = new AnaoIA("Javali", new Guerreiro()); Inventario loot = getRecompensa(javaliLeste, getJogador().getClasseJogador()); abrirInventario(loot, "Javali"); return 1; } public long irNorte() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("\n" + " \n" + " ░░ \n" + " ▒▓ \n" + " ▒▓▓ \n" + " ░▓▓░ ░ \n" + " ░░▓▓ ▒ \n" + " ▒▓██▓▓ \n" + " ░▓▓▓▓▓▒▒▒░ \n" + " ░▒▓▓▒██▓▓▒ \n" + " ░░░▒▓▓░░▒░░ \n" + " ▒▓▓██████▓▓▒░ ░ ▒▓▓█▓▓▒▒▒▒▒░ \n" + " ▒████████████████▓░ ██▓ ░░▒▒░██▒▓▓▓░ \n" + " ▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓▓▒▒▒█▓ ▓██▒ \n" + " ░███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▒███▒▓█▒ ▒▒▓▓▓▒▒▓█░▓▒▒▒░░░░░ \n" + " ░██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░▓█████▓▒▓▓░▒▓██▓▓▓▓▓█▓▒▓▓▓███▓▒░ \n" + " ██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██░░█████████▒▒▓▒██▓▓█▒▒▓▓██▒▒ ░▒░░▒░ \n" + " ▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓░▓████████████▒▒▒▒▒▓▒ ▓█▓░▒▓▒░ \n" + " █▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒████████████████▓▒▒▒▓▒███░▒▒▒▓█▓▓▒▒░░ \n" + " ▒█▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▒▒█████▓█████▒▓██▒▒█▓▒▓▓▓████▓▓█▒████▓▒░░ \n" + " ▒█▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▒▒▒▒▒▒▓██▓░▓▓██▒░▒▓▓██▓▓▒▓▒░████░▓▓▓▒▒▒░ \n" + " ░██▓▓▓▓▓▓▓▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒▓▒▒▒▒▓█▒▒▒▓▒▒▒▓▒▒▒▒█▓██░ \n" + " ██▓▓▓▓▓▓██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒██▒██░ \n" + " ▒█▓▓▓▓██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒██▒▓▓▓▓▓▒▒░ \n" + " ▓█▓▓██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒██░▒███▓▒▓██▓▓▒░ \n" + " ▒██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓█▓▒░█▓▒▒▒▓██▓░░ \n" + " ▒█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▓▒▒▒███░▒▒▒▒▓██▓▓▒░▒▒▒ \n" + " ▒█▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▓███▓░▒███▒▒▓▓▓▓▓░ \n" + " ▓█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓█▓▒▒▓█▓▒▓███████▒█▓▒▒▓▓▓▓▓▒░\n" + " ▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▒▓▓▒▓█▒▒░▓▓▒▓▓▒▓▓▒▒ \n" + " ▒██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▒▒▓▒▒▓▓▓▓▓▓▓▓▓▓▓█▒ " + "\nCom muitas rochas entre as grandes arvores e arbustos de variados tamanhos, avancar por aqui 
se torna por vezes bastante dificultoso\n" + "tendo que frequentemente escalar ou desviar das grandes rochas. Mais alguns minutos de caminhada voce se depara com uma pequena falésia\n" + "pelo seu tamanho reduzido ainda é possivel escalá-la, mas ainda assim esta acao pode vir a ser perigosa caso algo de errado.\n" + "O pensamento de contorná-la ou encontrar um ponto melhor para escalada passa por sua mente."); System.out.println("1-ESCALAR 2-CONTORNAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Utilizando de suas grandes capacidades físicas escalar tal elevação se torna brinquedo de criança mas ainda assim o instinto\n" + "diz a voce para seguir com cuidado, nunca se sabe quando uma pedra solta ou lisa ou sem aderencia suficiente vai ser a proxima a ser\n" + "escolida como alavanca. Ao terminar de subir voce avista a sua frente mais claridade que o normal para o meio de uma floresta."); System.out.println("1-AVANÇAR 2-VOLTAR E CONTORNAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("" + " \n" + " ▒ ░ ▒░ ░██ \n" + " ░█░ █░ ░ ░█▓ ██ \n" + " ▓█ ▒█ ▒ ░█ ▓█ ██ \n" + " ██▓███▓ ████████▓█▓░ \n" + " ▓█████▓▓▓▓████▓▒▒ ░░░▒ \n" + " ▒▒ █████▓▓███▓ \n" + " ░██████████▓ \n" + " ░▓██████████▒ \n" + " ▓███████████▓ \n" + " ░▒░ ▓██████ \n" + " █████▓▓░ ░▒▒▒░ ░░▒▒▒▓▓▓▓████▓▒ \n" + " ██▒░ ▓█████████████████████████████░ \n" + " ░▓██░▒████████████████████████████████ \n" + " ████████████████████████████████████▒ \n" + " ████████████▓████████▓▒▒██████████▒█▒ \n" + " ▒███████████████████▓▒▒▓████████▒ ▓█ \n" + " ▓█▓▒██████████████████████████░ ░ \n" + " ▒█ ████▓████████████▒ ▓██████ \n" + " ███▓░▒▒▒▓▓▒▒▒▓█ ░ ░█████▒ \n" + " ███░░ ░██▓▓░████▒ \n" + " ▒▓ ██░ ░███▒ ▒███▒ \n" + " ███░ ░ ██ ▓░ ███▒ ███ \n" + "▒█▓▓▓ ▒▒ █▒ ▓░ ▓█▒ ░█▓ \n" + "▒▒ ▓█░▓ ▓█ █ █▓ █▓ \n" + "▒░ ▓███ ░ █▓ █▓ ██ █▓ \n" + "▒▓ ▓▓█▓ ▓░ ░ ▒░░ ██ ██ ▒█░ ██ \n" + "▒█▒▓░███▒ ▓░▓▓▒ ██ ██ ░██ ▒ ░██ ░\n" + " ▓█▓░███▒▒░ ░▒▒▓ ▒█▒ █▓ ░ ▒█▒ ░ ███░▒██ ░\n" + " ░▓ ▒░ ░▒▒ ░░ ░ ▒▒░ ░ " + "\nno centro da clareira agora avistada totalmente esta um majestoso cervo, com seus grandes chifres e seu andar despreocupado.\n" + "voce ja consegue sentir o sabor de sua suculenta carne em sua boca. E com este pensamento prepara-se para o ataque."); System.out.println("1-ATACAR"); Som.cervo(); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { System.out.println("O cervo mesmo ferido ainda tenta correr para o abrigo da floresta, mas antes de atingir seu objetivo tomba, levantando leivas de grama com sua pesada galhada."); } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR"); resp = e.nextInt(); } System.out.println("Você começa a descarnar o animal, coletando o conteudo da caçada."); Som.esfolar(); AnaoIA cervoNorte = new AnaoIA("Cervo", new Guerreiro()); Inventario loot = getRecompensa(cervoNorte, getJogador().getClasseJogador()); abrirInventario(loot, "Cervo"); System.out.println("Apos a tarefa voce percebe que grande parte do dia já se passou e é sabio retornar antes que a noite caia sobre Lavitan, apesar desta parte ser apenas sua borda ainda assim, varios perigos podem surgir."); return 2; } if (resp == 2) { return 2; } System.out.println("Numero invalido, tente novamente\n" + "1-AVANÇAR 2-VOLTAR E CONTORNAR"); resp = e.nextInt(); } } if (resp == 2) { System.out.println("Voce segue esgueirando-se da vegetação e rochas, contornando a pequena falésia. 
Ela parece nunca terminar\n" + "mas voce segue destemidamente seu caminho em busca de algo para caçar.\n" + "Após um bom tempo de caminhada você ja se sente cansado, pois o esforço de andar nesta parte da floresta nomeDaFloresta é grande.\n" + "Passa pela sua mente o pensamento de voltar para casa e ir na vila nomeDaVila comprar alimentos\n" + "embora não seja a opção mais barata neste momento parece bastante convidativa."); System.out.println("1-VOLTAR PARA CASA 2-SEGUIR EM FRENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("" + "▓████▓▓▓██▓▓▓▓▓████▓██████████████▓█▓█▓▓▓▓█▓▓▓▓▓▓▓██▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓██▓▓▓▓▓▓█\n" + "██▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓█▓▓█▓▓████████████████████████▓███████▓▓▓▓▓▓▓█████████▓█████████████▓█▓███████████████▓██████\n" + "██ ░▒░▒▒░▓▓▓ ▒░░░░▒█▒▒ ▓░▒▓▒▒▒ ░░░▓██▓▓▓▓▓█████▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▓\n" + "▓▓░▒▒▒▒▒▒▓▓▓▒▓▒▓▒▒▒█▒▒░▒░▓▓▓▓░▒▒▒▒█████▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░░░▒░░▒░ ░░░▒▒▒▒▒▓██▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▓█\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓█▓▒██████▓███████▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░░░▒▒▒░▒▒▒▒▒▒▒░░░▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓▓██████▓▓▓▓▓▓██▒ ░▒▒▒█ ▒▒░░▓██▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓███▓▓▓▓▓▓█▓▒░▒▒ ░░▒▒▓▒▒▒▒▒▒░ ░▒▓▒░▒░░░░▒▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓███▓█▓▓████▓▓▓█▓▓▒▓▓▒▓▓▓▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▒▒▒▒▓▓▓▓████▓░▒░▒▓▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒░ ░▒░░▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓▓██▓▓▓▓█████▓▓▓▓▓█████████▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░▒░░▒▒░░░░▒▒▒▒▒ ▒▒▒░▒▒▒▒▒▒▒▒▒░░░░▒▒░░▒▒░░░░░▒▒▒▒▒░▒▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓█\n" + "▓█▓▓▓█▓▓██▓███▓▓▓▓▓▓▓▓▓▓▓█████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░ ░░ ░▒░▒░░░░░░ ░░░░░ ░▒▓▒▒░░▒▒▒░▒▒▒▓▓▓▒░▒▒░▒▒░░▒▒▒▒▒▒▓▓█▓▓▓▓▓▓▓▓▓▓▓█\n" + "███████████▓▓▓▓▓▓███▓▓▓▓▓▓▓▓▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒░▒▒░░▒░░▒░░░░▒░░░░░░▒░░░▒▒▒▒▒▒░░░▒▒▒▒▒▒▒░░░░░▒▒▒▒▒▒▒░░░▒▒▓▓▓▓▓▓▓▓▓▓▓▓\n" + "██▓█▓█████████▓█████▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒ ▒▒▒▒▓▒░▒▒░▒▒▒▒▒▒░░░░▒▒▒▒▒▒░ ░▒▓▒ ░▓▒▒▒▒▒▒░ ░▒▒▒▒░░░░▒░░░░░░▒██▓▓▓▓▓▓▓▓▓\n" + "███▓▓▓███▓██▓▓█▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░▒▒▓▒▒░░▒▒▒▒▒░░░ ░ ░░░░ ░▒▒▒▒▒▒▒▒░▒▒░░▒▒▓▒▒░░░░ ░░░░░ ░░░░▒▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓█▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░░░▒▒▓▒░▒▒░▒▒▒▒▒▒░░ ░ ░░▒▒▒▒▒░▒░░▒▒▒▒▓▓▓▒▒▒▒░░░░ ░▒▒▒▓▓▒▒░ ░▒▒▒▒▓▓▓▓█\n" + "▓█▓▓█▓█▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒░░▒░░░░▒▒▒▒▒▒▒▒▓▓▓▒░▒░░░░░░░░░░░ ░░▒▒░ ▒▒▓▓▓▒░░░░░░▒▒░░░▒▒▒▒░▒▒▒░░░░▒▒▒▒░▒▓▓▓\n" + "▓█▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒░▒ ░░ ░░ ░░░░░░▓▒░░░ ░▒▒░░▒▒▒▒▒░░ ░ ░ ░░░░ ░▒▒░░▒▒░▒▓▒▒▒▒▒▓\n" + "██▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓░░▒░▒░▒░▒▒░ ░░░░░░ ░░░ ░░░░░ ░▒░▒▒▒░░░░▒▒▒▒▒▒░ ░▒▒░ ░░░░░░░▒░░░░▓▓▒▒▒▒▒\n" + "██▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░░▒▒ ░▒▒▒░░▒▒░░▒░░░▒░░░░░ ░░▒▒░░░░░░░▒░▒▒▒▒░░▒░░ ░▒▒▒▓▒▒▒▒ ░░░░░░░░▒▒░▒░░▒▒▒▓▒▓\n" + "▓██▓▓▓▓▓▓██▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒▒░▒░░░░ ▒▒▒▒▒▒▒░▒▒▒░░░▒▒▒░░ ▒▒▒▒░░░░░ ▒░░░░░░░░▒▒░ ░░▒▒▒▒▒▒▓▒▒░ ░░░▒▒▒▒▒░▒▒▒▒▒▒▒▓\n" + "██▓███▓▓▓▓▓▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░▒░░░░ ▒░░▒▒░▒▒▒▒▓▒░░░░▒▒░░ ░▒░░░ ░ ░▒▒░░░▒▒▒▒▒░░░░▒░░▒▒▒▒░░░░░ ░░░ ░▓▓▒▒▒▒▒▒▒▒▒▓\n" + "▓█▓▓████▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒░░░░▒▒▒░░▒▒░▒▒ ░▒▒▓▒ ░░░▒░░░░ ░▒▒▒▒░░ ░▒▒▒▒▒▒▓▓▒░░░░ ░ ░▒▒░ ░░░░ ░░ ░▒▒▒▒▒▒▓▒▒▓\n" + "▓█▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒░▒▒▒▒▒▒▒▒▒░▒▓▒ ░░▒░ ░░░░▒▒ ░░░░░░▒░░░▒░░░░░░░░░░░▒▒▒▒▒▒░░▒▒▓▓░▒▒▒▒░ ░░▒▓\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░▒▓░░░░░░░░░▒░░▒▒░░░░▒▒░░░░░░ ░░░░░ ░░▒▒░░ ░░▒▒░░▒▒▒▒▒░░▒▒▒▒▒▓▒░▒▒▒▒▒▒░░░░ \n" + "▓█▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒░░▒░▒▒▒▒░ ░▒▒░░▒▒░▒▒░░▒░░░░░░ ▒▒▒▒░░ ░▒░░ ░░▒░▒▒░ ░▒▒░░░▒▒▒▒▒▓▓░░ ▒▒▒▒▒▒▓▓▒▒▒\n" + "███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ░░░▒▒▒░░ ░▒▒░▒▒░░░▒░░░░░░▒░░▒░ ░▒▒▒░░ ▒░░░░░▒░▒▒▒▒░░░░ 
░░░▒░▒▒▒░▒▒▒▒▒░▒▒▒░▒▒▒░░▒▒▒░░\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▒ ░▒▒▒░░ ░ ░▒▒▒░ ░░░░░░░░░░░░ ░ ▒░ ░▒▒▒░░░▒▒▒░░ ░▒░▒░░▒▒▒▒▒▒▒▓▒ ░▒▒▒░▒░ ░░ ░░▒\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░▒░░░░░░░░░░░░░ ░▒▒▒░░░░░░░░░░░ ░ ░░░▒▒░ ░ ░░░ ░░░ ░░░░▒░▒░░░░▒▒▒▒░▒▒▒▒▒ ░ ░▒▒▒▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ░▒░░░░░░░░░░ ░ ░░░░░░▒░▒░░░ ░░░ ░ ░░░ ░ ░▒▒▒▒░░░░░░░░░░ ░▒▒▒▒░░░▒░░▒▒░░▒▒▒▒▒░░▒░░▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒░▒░░░░░▒▒▓▒▒▒░░░░░░░ ░▒░▒▒░░▒▒░░░░░░ ░░░░ ░ ░ ░▒░░░░░░░▒░░░░░░░▒▒▒░░░░▒▓▒▒▒▒░▒▒▒░▒▒░░░ ░░░░\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░▒▒▒▒▒▒░░░░▒▒░▒▒▒░░░░░░░░░▒░▒▒▒░░ ░░░ ░░ ░░░░░ ░░░ ░ ░░░░░ ░░ ▒▒░░▒░░▒▒▒▒░░ ░░░░▒░░░░░░░▒\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓██▓██████▒░▒▒▒░▒░░ ░▒▒░▒▒▒▒▒░░ ░ ░░▒▒▒░▒▒░ ░▒░ ░ ░▒░ ░░░ ░░░▒░░░░░░ ▒▒░▒▒░ ░▒▒░░ ░▒▒▒▒▒░░▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓██▓▒▒▒▒░░░░░░ ░░░░░░░░░░░▒▒▒▒▒░░ ▒▒▒░▒▒▒▒▒░░░░ ░ ░ ░░░░ ▒▒▒▒▒▒▒▒▒▓▒░░░▒▒▒▒▓▒▒▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓█▓░ ▒░ ░ ▒▓▒░▒░░░▒▒░ ▒▒▒▒▒░░░▒░▓▒░░░▒░░░░░░ ░ ░ ░░░ ░ ░░░░░ ░░░░░ ░▒▒░░░░▒▒▓▒▒░░░░▒░░░▒▒▒▒▒░\n" + "▓█▓▓▓▓▓▓▓▓▓ ░ ░░░░░ ▒▒░░ ░░░░░░░░▒▒▒▒▒▒▒░░▒▒▒▒░░▒▒░░░░▒▒▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒░ ░ ░ ░░▒▒▒▒▒▒▓▓▒▒▒░░▒▓▓▒▒▒▒▓▒▒\n" + "▓█▓▓▓▓▓▓▓█▒ ░░░▒▒▒▒░▒▒░░▒░ ░░▒░▒▒▒▒▒░▒▒▒▒░▒░░▒▒▒░ ▒▒▒▒▒▓▓▓▓▓▓▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒░░░░ ░░ ░░░▒▒▒▒░▒▒▒▒▒░▒▒▒░░ ░░░░\n" + "█░ █▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░ ░░░▒▒▒▒░░░▒▒▒▒▒▒░ ▒▒▒░░ ░▒▒ ░░▒▒▒░▒▒▒▒▓▓▒▒▓▒▒▒▒▒▒░░░ ░░▒▒░░▒▒▒ ░ ░▒▒░ ░ ░░▒▒▒▒▒░░ \n" + "█░ ███▓▒░░▓▓▒░░▒▒▒▒░░░░░░ ░ ░░░░▒▒▒░░░░░▒ ░▒▒░▒▒░░ ▒▒░▒▒▒▒▒▒░░ ░░░▒▒▒▒▓▓▓▒░░░░ ▒▒▒▓▒▒▒▒▒▒░░░░ ▒░▒░░ ░░░░ ░░░▒░░░░░\n" + "█▓▒▓░░▒▒▒▒▒░░░░▒▒░ ░ ░ ░ ░░░ ░░░░▒░░▒▒▒▒░░░░░░░░░░ ░▒▒░░░░▒▒▒░░▒▒▒▒░░ ░░░░░░ ░▓▓▓▒░▒▒▒▒▒▒░░░░░▒░░░░░░░░░░░░░░░▒▒▒░▒\n" + "▓▓▒░ ▒▒▒▒▒░░▒▒▒░ ░ ░░░░░░░▒░▒░░▒▒░ ░░░░░ ░▒░░░░░▒▒ ░░░░░░░░▒░ ░░░░ ░▒▒▒▓▒░▒▒▒▒▒▒▒▒▒░░▒░░░░░░░░░░░ ░░ ░░░░\n" + "▒▓▓▒▒▓▒▒░░░▒░░░ ░░░ ░░░░░░░░░ ░░ ▒▒▒░░▒▓░░░░░░░░░░▒▒ ░ ░░░░░ ░░▒▒▒▒▒░▒▒░ ░░ ▓▒░░▒▒░▒▓▒▒▒▒▒▒▒▒░ ░ ░ ░\n" + "▒▒░▒▓▒▒▒░░▒░░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒░ ▒▒ ░▒▒▒▒▒░░░░ ░░░ ░▒▒▒▒▓▓▒▓▓▒▒▒▓▒▒▒▒▒▒▒▒░░░░░░ ▒▒▒░▒▒░▒▒▒▒▒▒▒▓▒▒▒▒░ ░░░░░ ░ ░░░░▒▒▒ " + "\nVocê segue bravamente seu caminho, sempre atento para os sons da floresta, nunca se sabe quando uma presa pode surgir.\n" + "Mas apesar dos seus esforços você se encontra em um “beco” sem saída formado pelas rochas\n" + "nesta parte da falésia não é possivel escalar devido a inclinacao vertical da formação.\n" + "Farto da situação e sem encontrar nenhum alvo para sua caçada voce decide voltar para casa e ir à vila comprar algo"); return 2; } if (resp == 1) { return 2; } System.out.println("Numero invalido, tente novamente\n" + "1-VOLTAR PARA CASA 2-SEGUIR EM FRENTE"); resp = e.nextInt(); } } System.out.println("Numero invalido, tente novamente\n" + "1-ESCALAR 2-CONTORNAR"); resp = e.nextInt(); } return 2; } public long fase2() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("ATO 2"); System.out.println(""); System.out.println("" + " \n" + " ▒▒░ \n" + " ░▒▒▒ \n" + " ░▒▓▒ ▒▒▒░░░ \n" + " ░▒▓▒ ░▒▒▒▓▓▓▒▓▒▓▓▓░\n" + " ░▒▒▒ ░▒▒▒░▓▒▒▒ ▒▒▓▓ \n" + " ░░▒▒▒░░ ░▒▓▒ ░▒▒▒▒ ░░ ░▒▒▓▓▒ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░ ▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒░ \n" + " ▒▒░░░ ░░░▒▒▒▒▒▒▒░░░ ▒░▒▒▒░░░░░░▒▒▒▒░ \n" + " ░▒░ ░▒░▒▒░▒▒░░░░░▒▒▒ ░▒▒▒▒▒▒▒▒░░░░ \n" + " ░░░▒▒▒░░░░░░░▒▒▒▒▒▒░░▒▒▒ ▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░░▒▒▒▒▒▒▒▒▒▒ ░▒▒░ ▒░░░ ▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▒▒▒▒▒░░▒▒▒▒▒░░░▒▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒░ ░ ░ ▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒ \n" + " ▒▒▒▒░▒░ ▒▒▒▒▒░░ ▒▒▓░ ░░▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▒▒▒▒▒▒▒▒░ ░▒▒▓░ ░░░ \n" + " ░░ ░▒▒▒▒ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ░▒▒▓ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▓ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ░▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ▒▒▒▓░ \n" + " ░░▒▓░ 
\n" + " ▒ ▒▒▓ \n" + " ░█▒█▒ ░▒▒ \n" + " ▓▓██▓ ▒▒▒ ▒▒▒░ \n" + " ▒▓▓▓█▒ ▒░░▓▓█▓▒ \n" + " ░▒▒▓▓▓ ▒▒████████ \n" + " ▓▓▓▓██ ░▒▒▓███████ \n" + " ▒█▓▓▒ ▒▓▒ " + "\nVoce retorna enfrentando as adversidades do caminho já passado anteriormente, agora com mais pressa e tranquilidade\n" + "por fim depara-se com a bifurcação norte/leste e ve o caminho ao sul de onde veio.\n" + "1-VOLTAR PARA A CIDADE 2-IR PARA O LESTE 3-IR PARA O NORTE"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp != 3) { if (resp == 1) { break; } if (resp == 2) { irLeste(); break; } if (resp == 3) { irNorte(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-VOLTAR PARA A CIDADE 2-IR PARA O LESTE 3-IR PARA O NORTE"); resp = e.nextInt(); } System.out.println("Seguindo seu caminho de retorno para sua casa voce tem o pressentimento de estar sendo observado, nao interrompe sua caminhada mas sua atenção eleva-se\n" + "para qualquer ruido ou movimentacao nas proximidades. Com um subito salto um Goblin surge na sua frente, com uma clava em mãos segue velozmente em sua direcao.\n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░░░ \n" + " ▓▓░ ▓███████▓ \n" + " ██▓ ░██▓▓▓▓▓░▒██▒ \n" + " ░███ ████▓▓▓▒░░▒▓█▒ \n" + " ░░░ █▓▓███▒░▒▒▒▓▒▒▒▓▓█▓ \n" + " ██▓▓▓▒ ▓▓▒▓▓▒▓▓ ░▓▓▓▓▒▒▒░▓█▒ \n" + " ▒▒▓▓▒▒▒░ ▓█▓▓███▓▒▒░▓█▓ ░ ▓▓▒ \n" + " ▒░▒▒░░ ▓░▒▓▒▓███▓▓██▓░▒▓▓▓▒░ \n" + " ▒░ ░ ▓ ▓ ▒▒░▓▒▓▒░▒▓▓▓▓ \n" + " ▒▒░ ░▒▓▓▒▒▒▓▓▒ ▓▓▒░░░░▒▒░ \n" + " ▒▓▓▒░ ░░ ▓█████▓▓▓▓▓██▓▒░▒▒░░ \n" + " ░▓▓▒ ░██████▓▒▓▒▒▓████▓▓▓▒░▒▒▓░ ▒▓█████▒ \n" + " ▓█▓▓░ ▒█▓▓▓███▓▓▓▒▒▓▒▓▓▒▒░░░░░░▒▓▓▒░ ▓█▓███▓░▒▒ \n" + " ▒█▓█▓░ ░▒▓▒▓▓▓████▓▓▓▒▒░ ░░░░▒▒▒▓▓▓▓▒▓█▓▓ ▓▒▓▒▓█▓ \n" + " ▓██▒▓█▒░░▒▒░ ▓▓▓███▒▒▒▒▓▓▒░▓███▓▒▒▒▒░░▒▒▒▓▓░▓ ░ ░░░ \n" + " ▓▓▓█▓░▒░▒░░ ▓▓▓▓▓▓▓▒▒▓▒▒▒▓██▓▒▒▒▒░░░▒▒▒░▒▒░▒▒▒ \n" + " ░▒░ ░▒▒░░ ▓▓▓▒▒▒▒▒▒░░░░▓█▓▒ ▒░ ▒░░▒ \n" + " ░▒░ ▒█▓▓█████▓▓▒▒░▒▒▒░░░ \n" + " ░▓▒ ░ ▓▓▓▓██████████▒▒░░░▒ \n" + " ▒▓▓▒ ░█▓▓▓▓▓███▓████▓▒░░░▒ \n" + " ░░ ▓▓▓▓▓▓▓▓▓▓▓▒▒▒░▒▒░▒░ \n" + " ▒▒▒▒▓▓▓▒▒▒▒░░░░░░░▒ \n" + " ░▒░░░░░ ░ ░▒ \n" + " ░░▒▒▒░░░░░░░░░░░░▒▓▒ \n" + " ▒░▒▒░░░░░░░░░░░▒▒▒▒▓▒ \n" + " ▒░▒▒▒░░░░░░░░░▒▒▒▒▒▓▓▒ \n" + " ▓▒▒▒░░░░ ░▒▒▒▒▒▒▒ \n" + " ▒▒▒░░ ▒▒░░ \n" + " ░░░ ░░░ \n" + " ▓▒░░░ ▒▒▓▓ \n" + " ▒▒▓▒░▒░ ░▒▒░▒▒ \n" + " ▒▒▒▒▒▒▒░░░░░░ ▒▒▒░▒▒ \n" + " ░░░░ ░░ ░ \n" + "Apesar dos poucos segundos do surgimento da criatura voce observa sua grotesca feição, a boca aberta exibia seus amarelados dentes\n" + "também vestindo alguns farrapos e um velho colete de couro. Sem titubear voce esta pronto para a batalha\n" + "com um rapido movimento já está com sua arma em maos e a adrenalina elevada.\n" + "1-ATACAR O GLOBIN 2-FUGIR"); Som.globin(); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.lutasoco(); System.out.println("Com a criatura estirada ao chao na sua frente voce ainda com o sangue quente da pequena luta, espera que mais deles aparecam para lhe enfrentarem\n" + "pois estas criaturas sempre atacam em bando. Apesar do pequeno momento de espera mais inimigos não aparecem"); Inimigo globinFase2 = new AnaoIA("Goblin", new Guerreiro()); Inventario loot = getRecompensa(globinFase2, getJogador().getClasseJogador()); abrirInventario(loot, "Goblin"); break; } if (resp == 2) { Som.corre(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR O GLOBIN 2-FUGIR"); resp = e.nextInt(); } System.out.println("Voce acha estranho, mas agradece mentalmente que isso não aconteceu, um inimigo, apesar do susto, não era um grande oponente\n" + "mas uma grande quantidade com certeza seria perigoso. 
Sem mais nada a fazer resta apenas retornar para a sua morada ao sul.\n" + "1-IR PARA O SUL"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA O SUL"); resp = e.nextInt(); } System.out.println("" + "██████████████████████████████████████████████▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▒▒▒\n" + "████████████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▓▒▒▒▒▒▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▒▒▓▒▒▓\n" + "████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▒▒▓▒▓▒▓\n" + "███████████████████████████████████████████▓█████▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▒▓▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▓▓▓▒▒▓\n" + "██████████████████████████████████████████▓█████▓▓▓▓▓▓▒▓▒▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▓▓▓▓▓▓▓▓▒▓▒▓▒▓▓▓▒▓▒▓\n" + "████████████████████████████████████████████████▓▓▓▓▓▒▓▓▓▓▓▓▓▓▒▒▒▒▓▓▓▓▓▒▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▒▒▒▓▒▓▒▓▓▓▒▓▒▒▒▒▒▓▓▓▒▓▒▒▒\n" + "████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓▓▒▓▒▓▒▓▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▓▒▓▒▒▒▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▒▒▒\n" + "██████████████████████████████████████████████▓█▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▓▓▒▓▒▒▒▒▒▓▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▓\n" + "███████████████████████████████████████████████▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▓▓▒▓▓▓▒▒▒▒▒▒▒▓▒▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓\n" + "███████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▓▒▓▒▒▓▓▒▓▒▓▒▒▒▒▒▒▒▓▒▒▒▓▓▓▓▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓\n" + "██████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▒▓▓▓▒▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▓▒▓▓▓▓▓▓\n" + "████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▒▓▓▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓███▓▓▓▓█\n" + "███████████████████████████████████████████████▓▒▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "███████████████████████████████████████████████▓▓▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓███████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒▒▓▓▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "███████████████████████████████████████████▓▓▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒░▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓███▓████████████████████████████████████▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓████████████████████████████████████████████▓▓▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓███████████████████████████████████████████▓▒▒▒░▒▒▓▓▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▓▒▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + 
"███████████████████████████████████████▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▓▒▒▒▒▒▓▓▓▓▓▓▒▒░▒▓▓▓▓▒▒▒▒▒▒▒▒░▒▒▒░░▒▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▓▓▒▒░▒▓▓▓▓▒▒▒░▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒░▒░░░▒▓▓▒▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓███████████████████████████████████████▓▒▒░▒▒▒▓▒▒▒▒▒▒░▒▒▓▓▓▓▒▒▒▒▒░▒▒░░░░▒▓▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▒▒░░▒▒▒▒▒░▒▒▒░░▒▓▓▓▓▓▓▒▒▒▒░░░░░▒▒▒▓▓█▓▓▓▓▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓████████████████████████████████████▓▓▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▒░░▒░░░░▒▒░░▓█▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "██████████████████████████████████████▓ ░▒▒▒▓▓▓▒▒▒▒░▒░▒░░░░▒▒▓▒░░░░▒▒▒░░░░░▓█▓█▓█▓█▓█████████▓█▓█▓▓▓█▓▓▓█▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓█▓▓██▓█████████████████████████████▓▒ ▒▒▓▒▒▒▒░░░░░░░░░░▒▒▒░░░░░░▒▒▒░ ▒████████████▓▓█████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "░ ░░ ░░░ ░▒░░░ ░ ░▒░░░░░░▒▒░▒░ ▒▒▓▓▓▓▒░░▒▒░░░░░░░░░▒▒░░░░▒░░░░ ░░░░░░░░░ ░ ░ ░ ░▒░░ ░░░ \n" + "░░░░ ░ ░░░ ░ ░▒░░░ ░ ░░▒▒▒▒▒▒▒░░▒▒▓░░▒▒▒░░ ░░▒░░░ ░░ ░ \n" + "▒▒▒▒▒▒▒▒▒▒▒▒▓▒░ ░▒░ ▒▒▓▒▒ ░░░░░░░░░░░░░ ░░░░░░░░░ ░ ░░ ░ ░ ░ ░ ░░░░ ░░ \n" + " ░▒▒▒░░░▒░░░▒▒▒░▒▓▒▒▒░░▒▒▒▒▒░▒▒▒░░░░▒░░░░░░░░░░░░ ░░░ ░ ░ ░ ░ ░ ░ ░ ░░░ ░ ░░ ░ ░░░░░░░▒\n" + "▒▒▒▒▒▒▒░░▒▒ ▒▒▒░░░▓▓░░░░▒▒▒░░░▒░▒▒░░▒▒▒▒▒▒▒░ ░▒▒▒▒▒▒▓▒ ░░ ░░ ░ ░░░░░ ░ ░ ░░░▒▒▒▓\n" + "▒▓▓▓▒▒▒▒▒▒▓▓▒▒▒░ ░░ ░▒▒▒░ ░░▒░░ ▒▒░▒░ ░ ░░ ░ ░ ░ ░░ ░ ░ ░ ░░░▒▒░░░░\n" + "░░░ ░ ░░ ░ ░ ░ ░ ░ ░░░░░ ░░ ░▒░░░ " + "\nCaminhando para fora da floresta, ainda com sua arma em mãos para prevenir qualquer surpresa e encurtar seu tempo de reacao\n" + "nesta parte da mata já é possivel avistar o sol comm clareza pois a vegetação nao e mais tao densa\n" + "Voce observa que ja passou do meio dia e a fome é grande, fazendo apressar seu passo. 
Ao sair da mata\n" + "voce avista fumaça no ceu, nao é algo totalmente estranho, mas a quantidade meio que lhe deixa preocupado.\n" + "Entao voce sente vontade de saber do que se trata, procurando um ponto de observacao melhor.\n" + "1-OBSERVAR"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-OBSERVAR"); resp = e.nextInt(); } System.out.println("" + " \n" + " ▒ \n" + " ░▓▓█▓▓█▓▓███▒ \n" + " ░ ▒▓▓░ ▓█▓██▓████████▓░ \n" + " █▓ ▓██▓██▓█▓▓████▓▓███▓▓░▒ ▒░ ▒ \n" + " ▒██▓▓██▓▓█▓▓░ ▓▓▓█▓█▓██▓▓██▓▓▓▓█▓▓▓▓▒▒▒▓█▓█▓ ░▓▓░▓ \n" + " ▓█▓▓███▓███▓██▓▒░▒█▓▓█▓▓▓▓▓▓█▓█▓▓▒▒▓▒▒▓██████████▒▓░ \n" + " ░▓█▓ ▓▓▓███████████▓▓▒▒▒▒▓▓▒▒▓▓▓▓▓▓▓▓▓▒░▒▒▒▓█▓████████▓▒░ \n" + " ▓▓█████▒░░▓▓▓▓█▓████▓▓▓█▓▒▒▒░░▓▓▓░░░░░░▒▒▒▒░░░▒▒░▒▒▓█▓▓▓▒▒▒▒▓▒ \n" + " ▓████████▓▓▓▓▓█▓▓▓▓█▓▓▓▓▒▒▒░ ░▒▒░░▒░ ░░▒░░░░░░▒▒▒░░ ░ ▒▒░▒▒░▒ ░▒░ \n" + " ▒▓▓█▓▒▓▓▓▓██▒▓▓▓▒▓█▓▒▓▒▒▓▒ ░░░░░ ▒▒▓▓▒▒▒░▒▒▒░▒▒▓██▓▓▓▓▓▓███▓▒▓▓▓░ \n" + " ▒▒▒▒▒▒▓▓▓▓▓▓▒▓▓░▒░▓▓█████████▓█▓▓▒░░▒▒▓▓████████▓██▓▓█▓▓░ \n" + " ▒▒░ ▒░ ▒▒▓▓▓▒░░▒▓░░▒▓██████████▓▓▓▓█▓▓▒▒▒▓▓▓▓████████▓▓▓▓▒▒▒▒░ \n" + " ▓███████▓▓░░▒▒░░▒▒▒░▒▒▒▒▒██▓▓▓▓████▓▓▓▓█▓▒▓▒░▒▒▒▒▓▓█▓▓█▓█▓▓▓▒▓▓▓▓░░ \n" + " ░▓██▓██▓▓██████▓▓▒░▒▒▒░░░░░░▒▒▓▓▓▓▓▓▓██▓▓▓▒░▓▒▒▓▓▒▒▓▓░▒▒▒▒░▒░░▒▒▒▓▓░▓ \n" + " ▒█▓█▓▓▓███▓▓▓▓███▓▒▒▒▒░░ ▒▒▓▒▒░░ ▒ ░▒░ ░░ ░▓███▓▓█░░▒░▒█▓▒░░░▓ \n" + " ░▒▒▓▓▓▒▓▓███▓█████▓▓▒▒░░░░░░░░░░░ ░ ░ ░░▒▓▓▒▒▒▓████▓██████▓▓▓█▒ ▒▒▓ \n" + " ░▓▒▒▒▒▒▒▓▓▒▒▓▓▓▓▓▓▒░░▒▒▒░▒▒▒▓▓▓█▓▓▓▓█▓█▓▓█▓▓▒▒▒▒░░░░▒▒▒▒▓█▓██▓▒███▓███████▒░░░ \n" + " ░█▓▓▒▒░▒▒▒▒▒▒▒▒▒▓▓▒▒░▒▒▒▒▒▒▓█▓████▓▓██▓████▓████▓▒░░░░▒▒▒▒▓▓▓▓█▓▒██▓█▓▓████▓▓▓▓ \n" + " ░▒▒░▒▓▓▒░▒▒▒▒▒░▒▒▒▒▓▒▒▒▓▓▓▓█▓█▓▓███▓▓█████▓▓███▓▓▓░░░░ ░░▒▒▒▓▒▓▓█▓█▓▓█▓▓▓▓▓▓▓▓▓ \n" + " ░ ▒▒ ░░░▓▒▒▒▓▓██▓███▓▓▓█▓▓▓██▓▓▓▒▒▒▓▓▒▓▓█▒▒▒░░ ░░▒▒▒▒▓▒▒▓▓▓▒▓▓▓▓░▒▓▒▒░ \n" + " ▒▒▓▒▓░░░░░░░░▒▒▓▒▒▒▓▓▓▓▒██▓█▓▓▓▒▒▓▒▓▒▒▒▓▓▓▓▓▓█▓▒░░▒▓▒▒░▒▒▒▒▒▓▓▒▒▒▓▓▓▓▓▓▓▒▒▓▓░ \n" + " ░▒▓▓████▓▒▓▓▓▒░░░ ▒▒▒▒▒▒▓▓░▓▒▓▒▒▒▓▒▓▒░▒▒▒▓▓█▓▓▓█▓▒▒▒▓███▓▓░ ░░░▒▓▒▒▒░▓▓▒░▒▓▓ \n" + " ▒▓█▓▓▓▓▓▓▓▒▓▓▓▓▒▓▒▒░░▒▒▒▒▒▒▒▒░▒▒▒▒▓▓▒▒░░░░▒▒▒▓▒▒░▒░░▒▓███▒▓▓▒▒▒░░░░░░░▒░░▒▒░▒▒▒▒▒ \n" + " ▒█▓▒▓▓▒▒▒▓▓▓▓▒▓▒░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒░░░░▒▓▒▓▒░░░░▒▒▒░▒▒▒▓███▓▓░░░░▓▒▒░░░▒▓▓░▒ \n" + " ░░░░▒▒▒▒░▒▒▒░░░░░░░░▒▒▒▒ ░░░▒░▒░ ░░░▒▒ ▒▒░▒▒▓▒ ░▒▒░░ ▒▒░▒▓▓███▓▓▒▓▓▒▒▒ ░ ▓▒ \n" + " ░▓▒▒▓░▒▒▒▒▒▒▒░░░░░▒▒░▒▓▒░░▒░░░░░░░ ░▒░░ ░░▒ ░░▒▒░▒▒▓▒▒▓▓█▓▓▒░▓▓▒ \n" + " ░ ▒▓▒▒▒▒░▒▓▒▒▒▒▒▓▓▒▒▒▒▓▒▒▒▒▒░░░░░░░░ ░░▒░░░▒▓▓▓▓▒▒▒▒▒▒ ▒▓▒▓░ ░ \n" + " ▒▒▒▓▓░▓█▒ ░░▒ ░▒▒▒░░▒▒░ ░░░░ ▒█▓▓░▒▓▒ \n" + " ░ ░▒░░░▒░░░░░ ░░ \n" + " ░▒░▒░░░░░░ \n" + " ▒▒▒░░▒░▒ \n" + " ▒▒▒░▒▒░▒░ \n" + " ▒▒▒▒▒░░▒▒ \n" + " ▓▒▒▒▒░▒▒▒ \n" + " ▒▒▒▒▒▒░░▒▒ \n" + " ░▓▒▒▒▒▒░▒▒▒▒░ \n" + " ░▒▒▒▓▓▓░▒▒▓▒░░▒▒▒▓▒▒░ \n" + " ░░░▒▒▒░░░▒▒▒░ ░░░░▒▒▒ \n" + " " + "\nSubindo em uma arvore já nas proximidades de sua casa voce avista a vila toda envolta em chamas e fumaca.\n" + "Uma sensação de desespero enche seu corpo, a sensação de perder entes queridos novamente o deixa perplexo\n" + "o unico pensamneto agora é correr para a vila.\n" + "1-CORRER PARA A VILA!"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-CORRER PARA A VILA!"); resp = e.nextInt(); } System.out.println("" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░ \n" + " ░▒▒▒▒▒▒▒▓▓▓▓▓▓▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▓▓▓▓▓▓▓▓▓▒▓▒▓▓▓▓▓▒▓▓▓▓▓▓▒▓▓▓▓▓▓▓█▓▓▒▒▒▒▒ \n" + " ░▓▓▓▓▓▓▓▓▓▓▒▒ ░ ░░▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░░▒▒▒▓▓░ \n" + " ▒▒▒▒▒▒░▒▒▒▓▓▓▓▓▓▒▒▒▒▒▒░░░▒▒▒░░░▒▒▒░░▒▒▒▒▒▓▓▒▓░ \n" + " ▒▓▒ ▒░ ░░▒▒▓▓▓▓▓▓▓▓▓▒▒░░ ░░▒▒▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░▒▓▓▓░ ░░ ▓▓▒░ ░░▒ ░░▒▒▓▓▓▓▓▓▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▓▒░░ \n" + " ▒▓▓██▓ ░ ░ ░▒▒░ ░▒▒▒░░░ ░░░░ ░▒▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒░▒ ░▒▓▒ \n" + " 
░▒▓░▒██▒ ▒▒▒░▒░▒▓▒░ ░▒░ ░▓▓▒░░░░░░ ▒▓▓▒▒▒▒▒▒▒▒▒▓▒▓▓▓▓▓▓░ \n" + " ░ ░░░▒██▒ ░░▒ ░ ░░░░ ░░▒▓▒▒░░▒░░▒░░ ░▓░ ▓▓▒▒▒▓▓▒▒░▓▓▓▓▓▓▓▓▓ \n" + " ░░░▒ ░█▓█▓▓▒▓▓▓▓▓▒ ░░░▒▒░░░ ░░▒▒▒ ░▓▓▒▓ ░█▓▓░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒ ░▒▒▒▓▓▓▓█▓▒░░░▒ ░ ░░▒░░ ░▓▓▓▓▒▓▒ ▓█▓▓▓░░░ \n" + " ▓▓▓▒▒▒▒▒▒░░ ░ ░░▒▓▓░░░▒ ░▓▓▓▓▓▓▒▒▒▒▒▓▒░ \n" + " ▓▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░ ▓█▓▒▒▓▓▒ ▒█▒ \n" + " ▒▓▒▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒▒▒▒▒▒▒░░▒░░ ▒▓▓▓▓▒▓▒░▒ \n" + " ░▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▒░░ ▒▓▓▓▓▓▓▓░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▓▓▒ ▒█▓▓▒▒▓░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒░▒ ▓▒ \n" + " ▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓░ \n" + " ░░░░▒▒▒▓▓▒▒▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▓▓ \n" + " ░▒▒▒░▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░▒▒▓▓▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒░░▒▒▒▓▒ \n" + " ░▓▓▒▓▒▒ ▒▒▒▒░▒▓▓▒▒▒▓▓▓▒░ \n" + " ▒▓▓▒▒ ░▒▒▒▓▓▓ \n" + " ░░░ ▒▓▒▓▓▒ \n" + " ░▓▓▓▒░ \n" + " ▒▒▒▒▒ " + "\nAbandonando sua bolsa (cheia ou nao) voce se põe em corrida, ignorando o cansaco e fome das suas acoes anteriores.\n" + "Apesar de voce ser um estrangeiro aquelas pessoas o receberam de bracos abertos, ha alguns anos voce convive com eles\n" + "seja em confraternizações, trabalhos ou caçadas, eles não eram sua familia de sangue mas não importava, voce gosta deles\n" + "e vice versa. A vontade de chegar logo lhe forcava a correr ainda mais rápido.\n" + "1-USAR TODA A SUA ENERGIA PARA CORRE MAIS 2-CORRER NORMALMENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.corre(); System.out.println("" + " \n" + " ▒▒▒▒░ \n" + " ░ ▓▒░▒▒▒▒ ▓▒ \n" + " ░▓▒█░▒▒▒▒▒▒▒ ▒▒▒▒▒▒░░░ \n" + " ▒▓▒▒░░▓▒▒ ▓▓▓▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒▒▒░▓▓▓█▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓ \n" + " ▓█▓▓▒▓▓▒░█████ ░▒▒▒▒▒▒▒▒ ░▒▒▓█ \n" + " ██▒░ ▓█▒▒░ ▓██░ ░░░▒▒▒▒░ ░▒▒ \n" + " ░▓█▓▒▓▒▓▓░░░░░ ░░▒▒▓▒ ░░▒▒ \n" + " ▓▓▓░░░░▒░░░░░░▒▓▓▓▓▒░▒▒▓ ▓▒▒▒ \n" + " ░▒█▓░ ░ ░░░▒▒▒█████▓▒▓▓░ ░ \n" + " ░▒▒▓▒▒▒█▒ ▒▒▒▒▒▓███▓▒▒▒▓ \n" + " ░░▒░ ▓█░░░▒▒▒░░░ ░░▒ \n" + " ▒█▒░░░░░ ░▒░░▒ \n" + " █░░░▒▒░▒▒▒░░ \n" + " ▒▓░▒▒▒░░▒▓▓▓ \n" + " ░▓▒▓█▓▓▒█▓▒░░ \n" + " ▓ ░▒▒▒▒▒░░▒▒░ \n" + " █▒ ░░▒░░░▒▒░▒ \n" + " ▒█░░▒░░█▒▒▒░▒░ \n" + " ▒█▒▒▒▒█ ▒▒▒▒▒ \n" + " ░▓▒░▒█░▒▒░░▒ \n" + " ▒▒░▓▒▒▒░░▒ \n" + " ░▒░▒░▒░ \n" + " ░▓░░░▒░ \n" + " ▓░▒▒░ \n" + " ░▒▒▓░ \n" + " ░▓░▒░░ \n" + " ░▓ ░░▒░ \n" + " ▓ ░░▒ \n" + " ▓▓████ \n" + " ▓████▓ \n" + " ░███▓▓ \n" + " █▓█▓█ \n" + " █▓███░ \n" + " ▓▓▓██░ \n" + " █▓██ \n" + " ▓█▓██▒ \n" + " ▒█▓███▓ \n" + " ▓▒░ "); System.out.println("\nNOSSA!! Voce é o Usain Bolt dos RPGs!"); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-USAR TODA A SUA ENERGIA PARA CORRE MAIS 2-CORRER NORMALMENTE"); resp = e.nextInt(); } System.out.println("O caminho para a vila parecia aumentar em vez do oposto, as passadas não condiziam com a vontade de chegar.\n" + "Por fim, voce finalmente para de correr na entrada da vila, Seus olhos nao conseguem acreditar no que veem\n" + "e o cansaco da corrida não vem. 
Quase todas as construcoes viraram uma pilha de cinzas e as que ainda resistiam\n" + "ardiam em chamas, varios corpos sem estavam espalhados pelo local, alguns ardendo em chamas junto das construcoes.\n" + "Uma indescritivel sensação domina seu corpo mas voce nao fala nada, está em um estado catatônico, apenas\n" + "as suas ainda respondem, e elas deviam ir em direção ao centro de Kenko.\n" + "1-IR AO CENTRO DA VILA 2-SENTAR E DESCANSAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("Voce, aparentemente desolado desaba no solo, ficando ali algum tempo\n" + "passado algumas horas, resolve ir ao centro de Kenko"); break; } if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR AO CENTRO DA VILA 2-SENTAR E DESCANSAR"); resp = e.nextInt(); } System.out.println("" + " \n" + " \n" + " ▒ \n" + " ░▓▒ \n" + " \n" + " ░ \n" + " ░ \n" + " \n" + " ░░░░░░░ ░░░░ ░░░ \n" + " ░░░░░▒░░░░▒░░▒▒░▒▒░░▒▒▒░▒░░ \n" + " ░░░░░▒░▒░░░░▒▒░░▒▒▒░▒▓▒▒░▒▒▒▒▒▒▓▒▒░ \n" + " ░ ░ ░░░ ░░░▒░░░▒▒▒░▒▒▒░░░░ \n" + " ░ ░▒▒▒ \n" + " ░░░░▒░░░ ░░░░ ░ ░▓▓▓░ \n" + " ▒▓▓▒ ░▓▒▒ ▒▓▓ ░▓▓▒▓▓▓▒ ▓▓▓▓▒▓ ░▒░▒▒▓░ ░▒▒▒▓▓ \n" + " ░▒▓▓▓▓ ▒▒▒▓▓▓▒▒▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒▓▓ \n" + " ░▒▒▓▒▓░ ░▒▒░▒░▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▓▓▒▒▓▓░░ ░░▒▒▓▓ \n" + " ░▒▒▓▒▓▒ ░▒▒▓▓█████▓░░▒▒▒░░░▒▒░░░░░░▒▒▒░░▒▒░▒▓███████▓▓▓▓▒▒▒░░ ░░▒▓▓ \n" + " ░░▒▓▒▓▒ ░░▒▒▒▓▓██████▓▒░ ░░░░░░░░░░▒▒▒▓▒ ░░▒▓▓▓████▓▓███████░ ▒▓▓ \n" + " ░░▒▒▒▒░▒▒▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▒░░ ░▒▒▓▓▓ ░▒▒▓▓████▓▓▓█▓▓▓▓▓▓█▒ ▒▓█░ \n" + " ░▒▒▓▓▒░▒░░░░░▒▒▒▒▒▒▓▓▓▓██████████▓▓▓▓▓▓▒▒▒░▒▒▓▓▓▓▓▓███████▓▓▓▓▒▓▓▒▓█▓▓▒▒▒▒ ░▒▒ \n" + " ░░▒▓▒▒░░░░░░░░░░░ ░ ░░░░▒▒▒▒▓▓██████████░▒▒▓▓▒▓████▓▓▓▓▓▓▒▒▒▓▓▓▓▓▓▓█▒░▒█▒ ▒▒▓░ \n" + " ░░ ░░░▒▒░░░░░░░░░░░░░░░░░░ ░░ ░░░▒▒▒ ▒░▒▒▒▒▓▓▓▓▓▒▓██▓▓▓█▓▓▒▒▒░ █████▒ ▓▓█▒ \n" + " ░▒▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░░░░░░░░░░▒▒▒▒▒▓▓▓▒▓▓▓▒▒▒▓██▓▒░▒▓▓▓▓▓▓▒ ▓██░ ▒▓█░ \n" + " ░▒░▒▓▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░▒▒▓▓▓▓▓▓▒░░░▒▒ ▒▓███▓▓▒▒▒▒▓▓▓▓▒▒█▓▒▓█▒ \n" + " ░▒▒▒▓▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░ ░▒▒▒▓▒▒░▓▓▓▓▓░▒░░▒▓██▓▓▓▓▓▓▒▒▒▒▒▓▓█▓▒▒▒▒▓▓▒ \n" + " ░▒▒░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒ ░░░░░▒▓▓▓▒▓▓░▒▓▓░▒▓▓██▓░▒▓▓██▓▒░ ▓█▓░░▒▓▓ \n" + "░▒▒▒▓▓▒▒░░▒▓▒▒░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒▒▒▓▓▓▓▓█▒▒▓▓▓▓ ░▒▓▒░░▒▓▓██▓▒▒▒▒▓▓▓▓▓░▒██▓▓▓ \n" + " ▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒░▒░░░░░▒▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▓█▓░░▒▓▓▒▓▒░▒▓██▓▓▓▓▓▓▒▒▒▒▓▓▓█▓ \n" + " ▒▒▒▒▓█▓▒▓▓▓▓▒░▒▒▒▒▒▒▒▒▒░▒▒▒░▒░▒░░░▒░▒▒▒▒░▒▒░▒▒▒▒▒▓▓▒▓▓░▒▒▒▓░░▒▓▓▓▓▓▓▒░▒▓██▒░▓▓██▓▒▒░░▓█▓▒ \n" + " ░░░░░ ░▒░ ░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒▒░░░░▒▒▒▒▒█▓▒▓▓▒▓▓██▒ ▒▒▒▒▓▓▓▓▓░░▒▓██▓▒░▒▓▓█▓▓▒ ▒██▒ \n" + " ░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▓██████▒ ▒▒▒▒▒▒▒▓▒▒▒░░▓██▓█▓▓▒▒▒▓▓█▓▓▓██▒ \n" + " ░░░░▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▓▓▓▒ ░▒▒▓▓▓▓▓▓▓█▓▓░▒▓██▓▒▓████▓▒ ▒▒ \n" + " ▒▒▒░▒▒▒▒▒▒░ ░░▒▒▓▓▓▒▒▓████▓░ \n" + " ░▒▒▒░▒▒▓█░ ▒▒▓▒ \n" + " ░░▒▒▒▒░ " + "\nA paisagem anteriormente se extendia a esta parte da vila também, apenas o antigo templo em seu centro ainda resistia\n" + "nao por falta da insistência de seja lá quem que atacou este pacato lugar, suas grossas paredes feitas de solidas\n" + "rochas a muito tempo, desde que se tem registro esta edificação existia em Kenko\n" + "certamente fazia parte de algo antigo.\n" + "Apenas sua porta estava totalmente destruida, certamente os moradores tentaram abrigar-se do ataque neste solido abrigo.\n" + "Voce sente que deve entrar.\n" + "1-ADENTRAR NO TEMPLO"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { Som.templo(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ADENTAR NO TEMPLO"); resp = e.nextInt(); } System.out.println("A imagem ali dentro com certeza não sairá da sua mente tão cedo, 
varios corpos mutilados e sinais de luta pelo ambiente.\n" + "Em meio ao silencio enlouquecedor voce ouve alguns gemidos. mais que depressa tenta localizar a origem deste som\n" + "uma breve sentelha de esperanca ascende em seu interior.\n" + "2-PROCURAR A ORIGEM DO SOM"); resp = e.nextInt(); while (resp != 2) { if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "2-PROCURAR A ORIGEM DO SOM"); resp = e.nextInt(); } Som.ferido(); System.out.println("" + " ▒ \n" + " ▒░ ▒▓ \n" + " ▓▓ █░ \n" + " ▒█ ░█ \n" + " █░ █▓ \n" + " █▓ █░ \n" + " ▓█ ▓█ \n" + " ░█ █▓ \n" + " █▒ ▒▓██░ \n" + " ▒██▓ ▒▓▒ \n" + " ▒▓█▒ ▓▓▓▒▒░ ░▒▒▓▓██▓ ░ ▓▒ \n" + " ▓░ ░ ░ ░░░░░░░░░▓████████████████▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░ ▒█▓ \n" + " ██▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▓█████████████████░░▒▒▒░▒▒▒░▒░▒░▒░▒░▒░░░▓█▓ \n" + " ▒██▓ ░░░░░░░░░░░░▒░▒░░░░▓█▓███████████████▓░░░░░░▒░░░░░░░▒░░░░░░▒▓ \n" + " ░░░▓▓ ░░░░░░░░░░░░░░░░░░▒▒░░░░░░░░░░░░░░░░▒░░░░░░░░░░░░░░░░░░░░▒▓▓░░ \n" + " ░░ ░▓▓░ ░░░░░░░░░░░░░░░░░░░░░░░░ ░ ░ ░░░░░░░░░░░░░░░░▒░░░▒░▒░▒░▒▓▓███▒▒░ \n" + " ░▒░░▒█▓██▒ ░░░░░░░░░░░░▒░░░░░░░░░░░░░░░░░▒░░░▒░▒░░░▒░▒░░░░░░░░░░░░▒▒▓▓░░▒░ \n" + " ▒▒░░▓▓▓███▒░░▒░░░▒░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ ░░░░░░░░░░░░░ ░░░░▒░\n" + " ░░░░░░ ░ ░░ ░ ░ ░ ░░░░░░░░░░░░░░░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "░▒▒░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒\n" + " ▓▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▒▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▓▓▓▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒░░▒░▒░░░░░░░░░░░░░░░░░░░░ ░ ░░░ ░ \n" + " ░░ ░░░░░░░ ░ ░ ░ ░ ░ ░░░ ░░░ ░ ░░░░░░░░░ ░░░░░░▒░▒░░░░ ░ ░ \n" + " ░░░░▒░░░▒▒▒░░░░ ░ ░ ░ ░ ░░░ ░ ░ ░░░░░░▒▒▒▒▒▒▒░░░▒░░ \n" + " ░░▓▓░░▒░▒░░░░░░░░ ░░░░░░░░░░░░░░░░░░▒░░ \n" + " ░▓▓░░▒░░░░░░ ░ ░░░░░░░░░░░░░░░░░░░░▒▒▒░░░░░░░░░░░░░░░▒▒░ \n" + " ░▓▒░░▒░░░░░░░▒░▒░▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░░░░░░░░░░░▒▒░░ \n" + " ░▓▓░░░░░░░░▒░░░░░░░░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓░░░░ \n" + " ░░░ ░▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░ ░░░░ \n" + " ░░░░ ░▒▒▒▒▒▒▒▒▒▒▒░▒░░░░░░░░░░ ░ ░ ░░░░░░▒▒▒▒▒░ \n" + " ░░░░░░░ ░░░░░░░░░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▓▒▓▒▒▒▒▒░░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░░░▒░░░░░░░░░░░░░░░░░░░░░▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░ \n" + " ░▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒░░░░░▒░░░░░░░░ " + "\nContornando o altar central esta nomeDoTio no chao, encharcado de sangue de um ferimento em seu peito e notaveis sinais\n" + "de tortura pelo corpo. Ele rastejava para fora de uma abertura no chao do templo que você sequer sabia da existencia\n" + "Seu amigo de longa data, foi o motivo de voce vir morar em nomeDaVila, salvando-o da perdição que se encontrava e dando\n" + "de certa maneira um novo motivo para continuar vivendo depois dos acontecimentos de seu passado. Ele balbucia algumas palavras\n" + "mas voce não entende, deve se aproximar para compreender melhor.\n" + "3-APROXIMAR-SE"); resp = e.nextInt(); while (resp != 3) { if (resp == 3) { break; } System.out.println("Numero invalido, tente novamente\n" + "3-APROXIMAR-SE"); resp = e.nextInt(); } System.out.println("Em seus bracos Henry reconhecendo sua feição começa a falar: - Eles levaram o fragmento do orbe que estavamos protegendo\n" + "por favor impeça que eles consigam utiliza-lo, por favor, eramos seus guardioes e agora o tomaram de nós\n" + "por favor o recupere e os impeça de ressuscitar nomeDoVilao. 
Você não entende muito bem a situação\n" + "Agora o unico clamor em seu pensamento é saber quem fez tal barbárie com todos e o questiona.\n" + "1-QUEM FEZ ISSO? 2-VOCE IRA FICAR BEM?"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("" + " \n" + " \n" + " ▒░ \n" + " ▒ \n" + " ▒ \n" + " ▒ ░▒▓▒░ \n" + " ▒ ░▓▒░▒███████▓▓░ ▒▓░▒▓▒ \n" + " ▒ ▒▓▓███▓▒▓▓▒▒▓▓▓████▓▓▓▓▓▓▓░ \n" + " ░▒▓▒▓▒ ░▒░▒▒▓▓▓▓█▓░▒░ ░░░░▒░░ ░░░ \n" + " ▒▓▓▒░▓█▓▒▓▒▓▓▓▒░ ▒░ ░░░░░▒░░ \n" + " ▒▓▒░░░▒▓▒▓▓▓▓▓▒░░▒▒░ ░░░░░░░░░░░░ \n" + " ▒▒▓▒▒▒▒▒▓▓▓▒▒░▓▓▒▒█▓▒ ░░░░░ ░░░░░░░░▒░░░▒░░ \n" + " ░▒▒▒▒░▒▒▒▒░░▒░▒████▓▒▒ ░░ ░░░░░░░░░░░░▒▓▒░░░ \n" + " ░▒░░▒░░░▓▓▓▓██▓▓▓▓▒▒░ ▒▓▒░░░░░░░░░░░░░░░░░░░▒▓▓█▓░░ \n" + " ▒███▒▒░▒▒▒▓██▓▓▓▓▓▓▒▒▒▒░▒▓▓░░░▒▒▓▒▒░░░░░░░ ▒▓▓█▓▒░ \n" + " ▓██▓▓░░▒▒▒▓▓▒▓▓▓▓▒▒▒▓▒░▒▒▒▒▒▒▒░▒░▒▒▒░▒▒▒ ░░▒▓▓▒▒░ \n" + " ▒▒▓▒▒░░░▒▒▒▒▒▓▒▒▒░▒▓█▒▓▒▒░░░░░░░░░░░░▒▒░▒▓ ░▒▓▒▒░░ \n" + " ▒▓███▒░ ░░░░▒▒▒▒▒▒█▓ ░▒▓▓▓▓▓▓▒▒▒░░░░▒░░░▓ ░▒▒▒░░ \n" + " ░▓█▓▓▓▓▒░ ▒▒ ░░▓█░ ░░░ ░▒░░▒▒▓▒░░▒░░ ▒▒ ░▒▒░░ \n" + " ▒▓███▓░ ░▓ ▒▓░▒▒▒▒▒▒▒░░ ░░▒▒▒▒░ ░▓▒ ▒▓▒▒▒▒░░ \n" + " ▒▓▓▓▒▒░ ▓▒ ▒▓▓▓▓▓▒▒░▒░░░▒░░░▒▒░ ░▒▒▒▒▒▒▒▒▒▒▒▒░ \n" + " █▒▒▓▓▓▓▒░░▒▒░ ░░▒▒▓▓▓▓▒▒▒▒▒░░▒░░░░ ░░░ ░ ░ ░▒▒▒▒▒▒▒▒░░░ \n" + " ▒▓▒▓▓▓▓███▓▒▒ ▓▓ ▒▒▒░▒▒▒▒▒▒░▒▒▒░ ░░░░░░░░░ ▒▒░░ \n" + " ▒▒▒▒▒▒▒▓▓▓▓▓▒▓█▓▓▒▒▓▒ ░░ ░▒▒░░▒▒░▒▒▒░░░░░░░░░░ \n" + " ░▒▒▓▓▓▓▓▒███▒▒░ ░░▒▒▒░░▒▒▒░░░░░▒░░░░░ \n" + " ▒▓▒▒▒▓████▓ ▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒░░░ \n" + " ▒▒ ░▓███▓ ░░░▒▒▒░▒░░░▒▓▓▓▓▒▒░░░░ \n" + " ░▓▒ ░ ░░░▒▒░░▒▓▓▓▓▒▒▒▒▒░░░░ \n" + " ░▓▓░ ░░ ░░▒▒▓▓▓▓▒▒░░░░ \n" + " ░▓▓▒ ░▒▓▓▓▒▒▒░░░░░ \n" + " ░▓▓▒ ░▒▒▒▒░░░▒▒▒░ \n" + " ▓▓▒ ░░░░▓▒▒▒▒▒░ \n" + " ▒▓▒░ ▒▓░▒▒░▒░ \n" + " ░▓▓░ ░░░▒░░▒ \n" + " ░▒▒░ ░▒░▒▒▒ \n" + " ░▒▒ ░░▒▒▒░ \n" + " ▓▓ ░░▒▒ \n" + " ░▒░ ░▒▒▒ \n" + " ░░ ▒▒▓ \n" + " " + "\nQuem fez isso com todos? -voce pergunta algumas vezes- nomeDoTiu esforça-se para lhe responder mas parece que\n" + "a vitalidade esta indo embora de seu velho corpo, esta batalha ele não vencerá… Com suas ultimas forcas ele responde que\n" + "foram os nomeDoBando, novamente pede para que voce os impeça, mas dessa vez termina a frase dizendo para voce pegar a arma\n" + "que esta dentro de uma porta secreta na porta secreta abaixo do templo, apesar da tortura eu nao lhes contei sobre esta\n" + "por favor a use bem, é a unica arma que pode derrotar quem o grupo nomeDoBando buscam, procure nos outros santuarios de \n" + "nomeDoSantuario por mais informações, por favor os impeça…. com estas ultmas palavras seus olhos perdem a vitalidade\n" + "ficando de um jeito estático desconfortante. 
Largando seu corpo calmamente voce observa a passagem aberta."); break; } if (resp == 2) { System.out.println("" + " \n" + " \n" + " ▒░ \n" + " ▒ \n" + " ▒ \n" + " ▒ ░▒▓▒░ \n" + " ▒ ░▓▒░▒███████▓▓░ ▒▓░▒▓▒ \n" + " ▒ ▒▓▓███▓▒▓▓▒▒▓▓▓████▓▓▓▓▓▓▓░ \n" + " ░▒▓▒▓▒ ░▒░▒▒▓▓▓▓█▓░▒░ ░░░░▒░░ ░░░ \n" + " ▒▓▓▒░▓█▓▒▓▒▓▓▓▒░ ▒░ ░░░░░▒░░ \n" + " ▒▓▒░░░▒▓▒▓▓▓▓▓▒░░▒▒░ ░░░░░░░░░░░░ \n" + " ▒▒▓▒▒▒▒▒▓▓▓▒▒░▓▓▒▒█▓▒ ░░░░░ ░░░░░░░░▒░░░▒░░ \n" + " ░▒▒▒▒░▒▒▒▒░░▒░▒████▓▒▒ ░░ ░░░░░░░░░░░░▒▓▒░░░ \n" + " ░▒░░▒░░░▓▓▓▓██▓▓▓▓▒▒░ ▒▓▒░░░░░░░░░░░░░░░░░░░▒▓▓█▓░░ \n" + " ▒███▒▒░▒▒▒▓██▓▓▓▓▓▓▒▒▒▒░▒▓▓░░░▒▒▓▒▒░░░░░░░ ▒▓▓█▓▒░ \n" + " ▓██▓▓░░▒▒▒▓▓▒▓▓▓▓▒▒▒▓▒░▒▒▒▒▒▒▒░▒░▒▒▒░▒▒▒ ░░▒▓▓▒▒░ \n" + " ▒▒▓▒▒░░░▒▒▒▒▒▓▒▒▒░▒▓█▒▓▒▒░░░░░░░░░░░░▒▒░▒▓ ░▒▓▒▒░░ \n" + " ▒▓███▒░ ░░░░▒▒▒▒▒▒█▓ ░▒▓▓▓▓▓▓▒▒▒░░░░▒░░░▓ ░▒▒▒░░ \n" + " ░▓█▓▓▓▓▒░ ▒▒ ░░▓█░ ░░░ ░▒░░▒▒▓▒░░▒░░ ▒▒ ░▒▒░░ \n" + " ▒▓███▓░ ░▓ ▒▓░▒▒▒▒▒▒▒░░ ░░▒▒▒▒░ ░▓▒ ▒▓▒▒▒▒░░ \n" + " ▒▓▓▓▒▒░ ▓▒ ▒▓▓▓▓▓▒▒░▒░░░▒░░░▒▒░ ░▒▒▒▒▒▒▒▒▒▒▒▒░ \n" + " █▒▒▓▓▓▓▒░░▒▒░ ░░▒▒▓▓▓▓▒▒▒▒▒░░▒░░░░ ░░░ ░ ░ ░▒▒▒▒▒▒▒▒░░░ \n" + " ▒▓▒▓▓▓▓███▓▒▒ ▓▓ ▒▒▒░▒▒▒▒▒▒░▒▒▒░ ░░░░░░░░░ ▒▒░░ \n" + " ▒▒▒▒▒▒▒▓▓▓▓▓▒▓█▓▓▒▒▓▒ ░░ ░▒▒░░▒▒░▒▒▒░░░░░░░░░░ \n" + " ░▒▒▓▓▓▓▓▒███▒▒░ ░░▒▒▒░░▒▒▒░░░░░▒░░░░░ \n" + " ▒▓▒▒▒▓████▓ ▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒░░░ \n" + " ▒▒ ░▓███▓ ░░░▒▒▒░▒░░░▒▓▓▓▓▒▒░░░░ \n" + " ░▓▒ ░ ░░░▒▒░░▒▓▓▓▓▒▒▒▒▒░░░░ \n" + " ░▓▓░ ░░ ░░▒▒▓▓▓▓▒▒░░░░ \n" + " ░▓▓▒ ░▒▓▓▓▒▒▒░░░░░ \n" + " ░▓▓▒ ░▒▒▒▒░░░▒▒▒░ \n" + " ▓▓▒ ░░░░▓▒▒▒▒▒░ \n" + " ▒▓▒░ ▒▓░▒▒░▒░ \n" + " ░▓▓░ ░░░▒░░▒ \n" + " ░▒▒░ ░▒░▒▒▒ \n" + " ░▒▒ ░░▒▒▒░ \n" + " ▓▓ ░░▒▒ \n" + " ░▒░ ░▒▒▒ \n" + " ░░ ▒▒▓ \n" + " " + "\nA vitalidade esta indo embora de seu velho corpo, esta batalha ele não vencerá… Com suas ultimas forcas ele responde que\n" + "foram os nomeDoBando, novamente pede para que voce os impeça, mas dessa vez termina a frase dizendo para voce pegar a arma\n" + "que esta dentro de uma porta secreta na porta secreta abaixo do templo, apesar da tortura eu nao lhes contei sobre esta\n" + "por favor a use bem, é a unica arma que pode derrotar quem o grupo nomeDoBando buscam, procure nos outros santuarios de \n" + "nomeDoSantuario por mais informações, por favor os impeça…. com estas ultmas palavras seus olhos perdem a vitalidade\n" + "ficando de um jeito estático desconfortante. Largando seu corpo calmamente voce observa a passagem aberta."); break; } System.out.println("Numero invalido, tente novamente\n" + "1-QUEM FEZ ISSO? 
2-VOCE IRA FICAR BEM?"); resp = e.nextInt(); } System.out.println("1-IR PELA PASSAGEM"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PELA PASSAGEM"); resp = e.nextInt(); } System.out.println("" + "▒▒▒▓▓▒▒▓▒▓▓▓▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▒░ ░▓▓▓▓▓██▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▓▓ ░░▒▓▒▒▓▓▓█▒░▒▒▒▓▓▒▓▒▒▓▓▒▒▒▒▒▒░░▒▒▒▒▓▒▒▒▒▒▒░░ ▒▒▓▒▒▒▒▒░░ ░▒▒▓░▒▓▒▒▒▒▓░ ░ ░ ░▓▓▒▒▒░▒▒▒░░░▒▓▓▓█\n" + "▓▓░▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▓▒▒▒▒▒▒▒▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▒▒▓▓▓▓▓▒▒▒▒░▒█▓▓▓▓▓▒▓▓▓▓▓▓▓▒ ░▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒ ▒▒▒▓█▓▒▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▒▒▒▓█▓░ ░▒▒▒▓▓▓▓▓▓▒▒▓▒▓▓░▒▓░▒▒▒▓▓░░▒░░▒▒▒▒▓▒▒▒▓▓▓▓▒▒▓▓▒▒▓▓▓▒▒▒\n" + "▒▓ ░▒░▒░░▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓▒░▒▒░░░▒▒▒░▒░▒▒▒▒▒▒▒▒░ ░▒░░░░░░▒░░▒▒▒▒▓▒▓▓▓▓ ░▓▓▒▒▒▒▒▒▒▒░▒▒ ░░▒▒▒░▒▓▒▓▓▒▒▒▓▒▒▓▓▒▒▒▒░░░░▒▓█ ▒▓▓▓▓▒▓██▓██ ░▒▒▒▒▒▒░▒▓▓▓▒▒▒▒░▒▒▒▒▒▒▒▓▓ ░▒▒▒▒▒▒░ ░\n" + " ▒▒ ▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ░ ░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▓▒ ░▒▒▒░░░░░░░ ░░▒▓▓▓░ ░▒▒▒▒▒▒▒▒▒░░░▒░░░ ░░░░░ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒ ▓▒ ░░▒▒░░ ▒▓▒▒▒ ░░▒▒░▒▒▒▒▓▓▓▓▒░░░▒▒▒░░░░▓▒ ░░▒▒▒▒▓▒░▒\n" + " ▒▓▓ ░▒▒▒░░▒▒▒▒░▒▒▒░░▒░▒▓▓░ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓░ ▒▓▓▒▒▒▒▒▒░▒▒▒░░░░ ░▒▓▓▓▓░ ▒▒▒▒▒▒▒▒▒▒░░▒▒░░░░▒░░▒▒▒▒░░▒░░▒░▒ ▒░▒▒▒░▒▒▒▒░▒█▒ ░▓▒▒▒▒▒░▒▒ ░ ░▒▒░ ░░▒ ░░▒▒▒▒░▒▒▒\n" + " ░▒▓▓ ░░ ░ ░░░░▒▒░░ ▒▒▒▒░▒░░░▒▒▒░▒▒▒░ ▒▓▓▓▓▒▒▒▒▒▒▓▒░░░░░░░▒▓▓▓░ ▒▓▒▒▒▒▒▒▒▒▒░▒░░░░▒░░░▒▒▒░░▒▒░▒░▒▒ ░░▒▒░▒▓▓▓▓▓▓█▒ ▓▓▓▓▓▓▓▒█▓ ░▓▒▓▓▓▓▓▓▓█░ ░░░░▒▓▓░ ░░░▒▒░░▒▒▒\n" + "░░░░░░▒▒░░░░ ░ ░░▒▒▒▒▓▓▓ ░▒▒▒░▒░▒▒▒░░░ ░▒▒▒▒▒░ ░░▒░ ░░░░ ░░ ░░░ ░▒▓▓▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▓▒▒▒▒▓▓▓▓▒▒▒▒▒▒▓▒░▒▓▓▒▓▒▒▓░ ░▒▒▒▒▒▒█▓ ░▓▓▓▓▒▓▓▓▓▓▓▓▒ ░░▒▒▓▓▓▓█▒ ░░░░░▒▒░▒▒▒\n" + "▒▒▓░ ░ ░▒▒▓▒░▒▒▒░▒▒▓▒▒░ ░▒▓▓▓▒░░░░░░░▒░▒▒░▒▒▒▒░░▒▒░ ░▒▒▒▒▒▒░░░░░░▒▒░ ░░▒▒▒░▒▒▒░░ ░░▒░ ░▒▒ ░ ░ ░░░░░▒░░ ▒▒▒▒▒▒ ░▓▓▓▓▒▒▓▓▓▒░▒▒▓▓ ░░▒▓▓██▓ ░ ░▒▒▒▒\n" + "▓███▓▓░░▒▒▒▒▒▒▒▒░▒▒▒▒▒░▒▒▓▓░ ░ ░░░▒░▒▒▒▒▒░▒▒▒▒░ ░░▒▒▒ ░▒▒▒▒▒▒▒░▒▒░▒░▓▒ ░▒▒▒▒▒▒▒▒▒▒▒▒░▒▒░░░ ░░ ▒▓▓▒▒▒▒▒▒▓▓▒▒▓▓▒▓▒▒▒▓▒▒▒▒▒█ ▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒█▒ ░▒▒▓▒░ ░▒▒▒▒▒▒ ░░░\n" + "▓████▒░▒▒▒▒▒▒▒▒▒▒▒▒░▒░░░▒▒▓▓░ ░░░▒░▒▒▒▒▒▒░░▒▒▒▒▒▒░░░░▓░ ░░░░░▒▓▒▒▒▒▒▒░▒▒▒ ░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒▒░░ ▒▒▒▓▓▓▓▓▓▓▓▒▒▓▒░▒▓▒░▒▒▓▓▓▓▓▓▒░▒░░░▒▒▓▒▒▒▒▒▒▒▒▓▓ ▒▒▒▒▒▓▓▒▓▓▓█▓▓▓░ \n" + "▓▓██▒░▒▒ ░▒▒▒░▒▒▒░░░░░▒▒▒▓▒ ▒░ ░▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒░░▒▒ ░▒▒░░ ░▒░░ ░░░ ░░░░░ ▒▒░░░▒▒▒▒▒ ░▒▒▒▒░▓▒▒▒▒▒▓▓▒▒░░▓▒▒▒▒▓▓▓▓▒▒▓░ ░░▒░░░▒▒░░▒░░ ░▒░▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒ ░\n" + "▓███░░▒░ ▒▒▒▒▒▒░▒░▒░▒▒▒▒▓░ ▓░ ▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░ ░░▒▒ ░▒▒▒░▒▒▓▓▓▓▓█▓░▒▓▒▓▓▓▒░▒▒▒░░░▒▒░ ░░░▒░ ▒▒▒▒░▒▒░▒▒▓▓▓▒▓▒▒░▒▒▒▒▒▒▒▒▒▒▓░ ░▒▒▒▒▒░ ░▒▓▒▒▒▓██▓▓▓▓▓▓▓▓▒▓▓▒▒▓▒▒██ ░▒\n" + "░▓▒░ ▒▓▒░▒▒▒▒▒▒▒▒░▒▒ ▓ ░▒▒░░ ░░░░░░▒░ ░░░▒▒▒▒▒▒▒▓▒▒▒▓▒▒███▓▓▓▓▓▓▓▓▒▒▓▒▓▒▒▓█▓ ░▒▒▓▒ ░░▒▒▒▒▒▒▒░ ▓▓▓▒▓▒▒▒▓▒▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▓▒ ▒▒░░▒▒▒▒▓▓░ ▒▓▓▓▓█▓▓▓▒▓▓▓▓▓▒▒▓▓▒▓▒▒▒▓▓ ░░\n" + "▒▒▒▒▒▒▒▒░░░░░░▒▒▒░░▒▒▒▒▒░▒░ ░░░▒▒▒▒░▒ ░▒░▒▒▒▓▓▓▓▓▓▓▓▓▓▒░░▒▒▓▒█░░▓▓▓▓▓▓▓▓▓█▒░▓▓▓▒░▒▒▓▓ ░▓▓▓▓▓▓█▓██▓▓███▓ ▒▒▓▓▓▒▓▓▓▓▓▓▓▒▓▓▓▒▓▓▓▒▓▓▒▒▓░ ▒▒▒░░▒▒▒▓▓ ▒▒▓█▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▓▓▓ ░▒\n" + "▓▓▒▒▒▒▓▓▒▒▓█▒░▒▓▓▓▒▓▓▒▒▓▓▓▓▓▓▓▒▒▒▒▒▓█▓ ░▒▒▒░░░▒▒▓▓▓▓▓▓▓▒░░▒▒▓█░▒▓▒▓▓▓▓▓▒▓█▓░▓▓▒▓▓▒▒▒█▒▒▒▓▓▓▓▓▓▓▒▓▒▓▒▒▓██ ░░▒▒▓▓▓▓▓▓▓▓█▓▓▒▓▓▓▒▓██▒▓░ ░░▒▒▒▒ ▒▒▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▒▓▓▓▒▓█▒ ░▒\n" + "▒▒░▒▒▒▒▒▒▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▒▒▒▒▒▒▓█ ░▒▒▒░░░░▓▓▓▓▒▒▒▓▒▓▒▒▒▓▓▓ ▓▓▒▒▓▓▓▒▓▓▓▓▓▒▓▓▓▓▓▒▓▓▓▒░▒▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓ ░░ ░▒▒▒▒▒▒▒▒▒░ ░░ ░░ ░░░░░░ ░░▒░▒▒ ░▒░▒▒▒▒▓▓▓▓▓▓▒▒▓▓▓▒▒▒▒▓▒▓ \n" + "▒▓▒▒▒▒▒▓▒▓▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒░░▒▒▒░▒░░▒█ ▒▒▒▒▒▒░░▒▓█▓▓▒▓▓▓▓▓▓▓▓█▓ ░▒▒░▒▒▒▒▓▓▓▓██▓▓▓▓█▓▓█ ░░▒▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓ ░ ░░░░▒ ░ ░ ░▓▒▓▓███▓▓▓██▓█▓ ░▒▒▒▓▒▒▒▓▓▓▓▓▒▓▓▓▓▓▓▒▒▒▓▒ ░▒░\n" + "▒▓▓▒▓▓▓▓▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒░▒▒░░░▒▒░▒░░░▒▓ ░ ░▒░▒░░░ ░▒▒░▒▓▓▓▓▒░▒░▒░░▒░░▒▒░░▒▓▓▓▒▒▓▓▓▓ ▒▒▓▓▓▒▓▓▓▓▓▒░▒▒▓▒ ▒▒▒ ▒▒░▒▒▒▒▒▓▓▓▓█▓ ░▓█▓▒▒▓█▓▓▓▓▓▓▓▓█░ ▒▒▓▓▓▒▒▒▓█▓▓▓▓▓▓▓▓▒░▒▒▓ ▒▓█\n" + "░▓▒▒▒▒░░░▒▒▒▒▒▒▒▒▒ ░▒▒▒▓ ░▒▒▒░ ░▒▓ ░▓▓▒▓▓▓▒ ░ ░▒▒▒▒▒▒▒▒ ░░░▒░ ▒▓▓▒▒▓█▓▓▓▓▒▒▓▓ ▒░▒░ ▒▒░░▒▒▒░▒░░▒▓█░ ░▒▒▓▓░▓▓▓▒▓▓▓▓▓▓██▒ ▒▒▒▒▒▒▒▒▓█▓▓▒▒░▒▒▒▓▒▒ ░▒▒▓\n" + " ░░░░▒▒░ ░▒▒▒ ░ ░░░░░ ░▒▒░▒░▒▓▓▒░▓▓█░ ▒▒▓▓▓▓█▓ ▒▒▒▒▒▒▒░ 
░▒▒▒▒▓▓▓▒▒▒▓▒▓▓▓▓▒ ▒▓▒▒▒██▓▓▓▒▒▒░░ ░ ▒▒▒░▓▒░▒▒░░▒░░▒▒▓░ ░▒▒▒░░▒▓▓▓▓▓▒▒▒███▓ ░ ░░░ ░▒▒▒░░░ ░▒▒▒\n" + "▓▓▓▓▓▓███▓▒▒▒░ ░▒▒▒▓░▒░ ░░░░░░░░░▒▓▒░▒▒ ▒▒▒▓▒▓▓▒▓▓▒▓█▓ ▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▓▓▓▓▒ ░░░▒▒░░░░░ ░▒▒▒▒▒▒▒▒░▒▒▒░░▒░ ░▒░░░▒▓▓▓▓▓▒▒▒██▓ ░▓▓█▓░░▒░ ░▒▒▓▓▓▓▓▒▒ ░\n" + "▓▓▓▓▓▓▓▓█████░░░░░░▒▓▓▒▓▒ ░░░░░░░▒▒░▒▒▒▒▓▒ ░░░░▒▓▒▓▓▓▒▓▓ ░▓▓▒▓▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▓█░ ░▒▓▒▓▒▒▒▒▒▒▒▓▒▓▒░ ░▒▒▒▒▒▒▒▒▒ ░▓▓▓▒░ ░▒▓▓▓▓▓▓██░▒▓▓▓▓▓▓▓▓█▓ ░▓▒░ ░▓▓▓▓▓▓▓▓▓▒▒▒\n" + "▓▒▓▒▒▓▓▒▒▓▓▓▓░░░ ░░▒▒▓▓▒ ░░░ ░░░▒▒▒▒░▒▒▓█▓ ░░░▒▒▒▒ ▒▓▒▓▓▓▓▓▒▓▓ ░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▒░░░░▒▓▓▓▓▓▓▒░▒░░▒▓▓▓▓▓▓▒░ ░▒▒░ ░▒▒▓▓▓▓▒▒▒▒░ ░▒█▓░ ▒█▓▓▓▓▓▓▒▓▓▓ ░▒▒░░░░▓▓▓▓▓▓▓▓▓▓▓\n" + "▓▓▓▒▒▒▓▒▒▓▓░▒▒░ ▒▒▒░ ░░ ░░▒▒▒▒▒░▒▒▒▒▒░▒▒▓▓█▓▒▓▓▒░ ░░░░▒▒▓▒▒▒ ░▒▒▒▒▓▒▓▓▓▓▓▓▒░ ░░▒░░▒▓▓░▓▓▒▓▒▒ ░▒▓▒▒▒▒▓▓░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒ ▒▒▓▓▒▓▓▓▓▓▒▓▓▓▒ ░▒▒▒░░▒▒▒▓▓▓▓▓▓▓▒▒\n" + "▒▒▒▓▓▓▓▓██▓░░ ░░ ░░░▒░ ░ ░ ░▓░ ▒▓▒▒▒▒▓▓▓▓▓▒▓▓▓▓█▒▒ ░░▒ ░▒▒░░▒▒▒▓▓▒░░░ ░░░░░░▒▓▓▒▒▒░▓▓▒░░░▒▒▒▒▓▓█▓ ░▒▒░▒▒▓▓▒▒▒▒▒▒▒▒▒▓▒ ░▒▒▒▓▓▒▒▒▒▒▓▒▓▓▓█▒ ░▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▒\n" + "░░░░░░▒░░░ ░▒▒▓▓▒░▒▒▒▓▓▓▒ ░▒▒ ░▓▓▓ ░▒▒▒▒▓▓▒▓▓▓▓▓▓▓▒▒▓▓▓▓▒ ░▓▓▒▒▒▒▒▒░▒ ░▒░▒▒░░░▒▒▒▒▒▓▓▓▒░░░▒▒▒░ ░ ▒░░ ▒▒▓▒▒▒▒▒▓▒▒▒▒▒▒░░▒▓▓ ░▒▒▒▒▒▓▓▒▒▒░▒▓▓▒▒▒█▓ ░▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓\n" + " ▒▒▒▓▓▒ ░▒▒▒▓▒▒▒▒▒▓▓▓▓██▓███▓░░▒▒ ░ ░▒▒▒▒▒▓█▓▓▓▓▓▓▓▓▒▒▓▓█▒ ░░░▓▓▓▓▓▓▓▓▓▓▓ ░▒░░ ░░░░░░░ ░░░▒▒░▒▓▒▒▒ ░▒▓▓▒░▒▒▒▒▓▒▒▒▒▒▒▓░░▒▒▒▒▒▒▒▓▒▒▓▒▒░▒▒▒▒░░ ░░▒▓▒▓▓▓▓▓▓\n" + "▒▒▒▒▒▓▒▓██ ░▒░▒▒▒▒▒░▒▒▓▓▒▓▓▓▓▓█▓███▓▓▒░░░ ░▒▒▒░▒▓▓▓███▓▓█▓▓▓█░ ░░░░██▒▓▓▒▓▓▓▓█░ ░░▓▓░░▒▒▓▓ ░█▓▓▓▓█▓ ░▓▓▓▓▓▒░▒▒▒▒▓▓▒ ░▒▒▒▒▒▒▒▒▓▓▓░░ ▒▓▒▒▒▓▒▒▓▒░▒▒░ ░░▒▓▒▒░▒ ░ ░ ░░ ░░\n" + "▒▓▓▒▒▒▒▒█▓ ▒▒▒▒▒░▒▓▓▒░▒▒▓▓▓▓▓▓▓▓▓▓██▓███▓░ ░▒▒▒▒▒▒▒▓▒▒░▒░░░▒ ░▒▒▓▒▒▒▓▓▓▓▓▓░ ▒▓████▓▓▓▓▓░▒▓▓▓▓▓██░ ▒▒▓▒▒▒▒▓▒▒▒▒▒▓▓▒ ░░░░░░░▒░ ░▒░ ░▒▒▒▓▒▓▓▓▓█▓█▓ ░ ▒▒▓▓▒░░ \n" + "▒▓▓▒▒▒▒▒█▒ ▒▒▒▒░▒▓▓▒░▒▒▓▒▓▓▓▓▓▓▓▓▓▓█▓▓█████▓ ░░░░░░░░▒▒▒▒░ ░▒▒░░▒▒▓▓▓ ░▒▓▓▓▓▓▓▓▒▓▓▓▓▒▒▒▓▓▓▓░ ▒▒▒▒▒▒▒▒▒▒▒▒▒░░ ░░░░░░▒▒▒ ░░░░░░░▒░ ▒▓▓▓▓▓▓▒░▒▓▓▒▒▓▓▓░▒░ ░░ ▒░ ░░░\n" + "▒▓▓▒▒▒▒▒█▓ ░▒▒▒▒░▒▒▓▒▒▒▒▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▒░▓▓░ ▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓░ ░▒▒▒▒░ ▒▓▓▓▓▒▒▒▒▓▓▓█▓▓▓▓▓▓█▒ ░░░░░░▒▒▒▒▒▓▓▓▓▓▓▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ ░▒░▒▓▓▓▓▓▓▓▓▓▒▒░▓█▒▒▒░▒▓███ ░ ░\n" + "▒▒▓▒▒░▒▓█▓ ░▒▒▒░▒▒▓▓▓▓▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓██░▓▓█▓▓▓▓▓█▓▓▓▓▓▓▓████ ▒ ░▒▒▒▒▒▒▓▒░░░▒▒▒▒▒▓▓█▓▓▓▓█▓ ░░░▒░░░░▒▒░░▓▒▒▓▓▓▒░▒▒▒▒▒▒▒▓▓▓▒▓▓▒▒▓▓█▓ ▒░▒▒░▓▓▓█▓▒▓▒▓▓██ ▒▒▓▒▓██▒ ░░▒▒\n" + "░░░░░░ ░ ░▒▒▒▒▒▓▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓█▓█▒▓░▒▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓███ ▒▒ ░░ ░▒▒▒▒▒▒░░░░▒░░░▒▒▒▒▒▒▓░ ░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▓▓▓▒▒▓▓▓▓▓▓█░ ▒▒░░ ▓█▓▓▓▒▓▓▓▓█▒░░▒▓▒██▓ ░▒▒▒░\n" + "▒▓▒▓▓▓▓▒ ▒▒▒▒▓▓▓▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ ░░▒▓▓▓▓▓▒▒▒▓▒▒▓▓▓▓▓███░ ░▒▒▒░░▒▒▒▒░░▒▒▒▒▒▓▒▒░▒░▒▒▒▓▓ ░░░░░░░░░░▒▒▒▒▒▒▒▓▒▒▓▒▒▒▒▓▓▒▒▓▒▓▓▓▓▓▓▓█▒░▒░░░░▒██▓▓▒▒▒▓█░░▓▒ ▒███▒ ░▒▒▒▒░\n" + "▒▓░▒▒▒▓█▓░ ░▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓ ▒▒▒▒░░▒▓▒░░▒▒▓▒▓▒▒▓██▓ ░░░▒▒░▒▒▒▒░▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▓▒ ░░░░░▒▒▒▒▒▒░░░░░░░░░░ ░▒▒▒▒▒▓▓▓▓▒▓▓▓█▒░░░░░░░▓▓▓▓▓▓▓█░ ▒█▓▓▓███░░░▒▒▒░▒\n" + "▓▒▒░▒░▒▒▓▓░░ ░░▒▒▒▒▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░▒░░░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ░ ░░░░░░░░░▒▒▒░░░░░░░░░░░░▒▒▒▒▒▓▓▓▓▓▒ ▒▒░▒░░ ▓▓▒██▓▓░ ░▒▓██▓ ░▒▒▒░▒▒\n" + "▓▒▒░▒▒░▒▓▓▒ ░░▒░▒▒▒▒▒▒░░░░░ ░ ▒▒▒▒▒▒░░▒▒░░░▒▒▒▒▒▒░ ░░▒▒▒▒░░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▓▓ ░ ░░▒▒▒░░░▒░▒░▒░▒░░░░░░░░ ░░▒░░░░▓▓▒▒░ ░▒ ░░▒░▒▒░▒\n" + "▒▓▒▒▓▒▒▓█░ ░▒▒▒▓▒▒▒▒░▒░ ░░ ░░░ ░░ ░ ░░ ░▒░▒▒▒▒▒░░▒░░░░░▒▒░░▒▒▒▒▒▒▒▒▓▓ ▒▒▒▒▒▒▒▒▒░▒▓▒ ░░░░░░ ░░ ░░▒▓▒ ▒▒▒▒▓▓▒▒▒▒▒░ ░░░░░░░░▒\n" + "▒▓▒░▒▒▒▓░ ░ ░░░ ░ ░░░░ ▓▓▒ ▓▓▒▒█▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▒▒░░░ ░░▒▒▒▒▒▒▒░▒░░▒▒░▒░▒▒▒▒▒▒▒░▒▒▒▒▒▒▒░░░▒▒▒▒▓▓░ ░░▒▒▓▓▓▒▓▓█▒▒▒▒▒▒▓▓██▓ ░▒▒▒▓▓▓▓▓▒▒▓░ ░░░░░░░░▒\n" + "▒▒▓▒▒▒▒ ░▓▓▓▓█▓▓▓▒▒░▒░▒░▒▒░ ░░ ░▓▓░▓▒▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▓▓▒░ ░░░▒▒▒▒▒░▒▒▒▒▒░░░░░ ░░ ░░░░▒▒▒▓▓▓█▓███████████ ░░▒░▒▓▓▓▒▒▒▒▓▒▒▒░░▒▒▒▒▒▒▓▓▓▓▓▓▓▒ ▒▒ ░░░░░░░░░▒" + "\ndescendo a pequena escada voce avista o pequeno altar ao fundo, onde provavelmente estava o fragmento do orbe.\n" + "Observando a pequena sala, com 
suas paredes de pedra com varias inscricoes em alguma linguagem\n" + "desconhecida a voce, como o tio Jorge lhe disse voce procura pela pedra que abre a tal porta secreta\n" + "1-EMPURRAR A PEDRA AO NORTE 2-EMPURRAR A PEDRA AO SUL 3-EMPURRAR A PEDRA AO OESTE 4-EMPURRAR A PEDRA AO LESTE"); resp = e.nextInt(); while (resp != 3) { Som.emppedra(); if (resp == 3) { System.out.println("Voce percebe que esta pedra pode ser empurrada, a pressionando algumas outras pedras abrem\n" + "mostrando uma pequena adaga com varias inscricoes runicas e um cabo transparente de vidro, ali dentro\n" + "parecia haver alguma substancia liquida que parecia mexer-se sozinha."); break; } System.out.println("Não é essa pedra, tente novamente\n" + "1-EMPURRAR A PEDRA AO NORTE 2-EMPURRAR A PEDRA AO SUL 3-EMPURRAR A PEDRA AO OESTE 4-EMPURRAR A PEDRA AO LESTE"); resp = e.nextInt(); } fase3(); return 0; } public void fase3() throws Exception { Scanner e = new Scanner(System.in); System.out.println("2-SACAR SUA ADAGA"); int resp = e.nextInt(); if (resp == 2) { System.out.println("" + "███████████████████████████████████████████████████████████████▓█████\n" + "██████████████████████████████████████████████████████████████▓▒▒▓███\n" + "████████████████████████████████████████████████████████████▓░▓▓ ▒██\n" + "███████████████████████████████████████████████████████████▓░▓▒ ▒███\n" + "█████████████████████████████████████████████████████████▓▓▓▓░ █████\n" + "███████████████████████████████████████████████████████▓▒▓▓▒░ ▓██████\n" + "████████████████████████████████████████████████████▓▒▒▒▒▒░ ▓███████\n" + "██████████████████████████████████████████████████▒▒▒▓▓█▒░ ░▓████████\n" + "████████████████████████████████████████████████▓▒▒▓▓██▒░ ░▓█████████\n" + "███████████████████████████████████████████████▒▒▓▓██▓▒░ ░▓██████████\n" + "██████████████████████████████████████████████▒▒▓███▓▒ ░░▓███████████\n" + "█████████████████████████████████████████████▓▒▓███▒░░░░▓████████████\n" + "████████████████████████████████████▓███████▒░███▓░ ▓█████████████\n" + "█████████████████████████████████▓▓▓▓▓█████▒░▒██▒ ▒███████████████\n" + "████████████████████████████████▓▓████▓▒▒▒░ ▒▓▓░ ▓█████████████████\n" + "██████████████████████████████████████▒▒▓▒▒▒▒░▒░░░███████████████████\n" + "█████████████████████████████████████▓▒▓▒▒▒▒▒▒░░ ░▓▓██▓▒▒▓██████████\n" + "████████████████████████████████████▓▒▒▒░▒▒░▒▓▓░░▒ ░▒▒▒▓████████████\n" + "████████████████████████████████████▓▓▒▒▒▓▒▒▓▓▓████▓▓████████████████\n" + "████████████████████████████████████▒▓▓▒▓▒░▒▒▓███████████████████████\n" + "██████████████████████████████████▓▓▓▒▓▓▓▓░▓▒▓███████████████████████\n" + "█████████████████████████████████▒▓▓▒▒▓▓▒ ▒▒▒▓███████████████████████\n" + "████████████████████████████████▒▓▓▒▓▓▓▒ ▓▓▓█████████████████████████\n" + "███████████████████████████████▓▒█▒▒▒▓▒ ▓████████████████████████████\n" + "██████████████████████████████▓▓█▓▒▒▓▓ ░█████████████████████████████\n" + "█████████████████████████████▓▓█▓▓▓▓█▓ ▓█████████████████████████████\n" + "████████████████████████████▓▓█▓▓▓▓▓ ▓██████████████████████████████\n" + "███████████████████████████▓▓▓▓▓▓▓ ▒████████████████████████████████\n" + "█████████████████████████▓▓▓▒▓▓▓ ▒██████████████████████████████████\n" + "███████████████████████▓▓▓▓▒▓▓▒ ▒████████████████████████████████████\n" + "██████████████████████▓▓▓▓▓▓▒▒▒██████████████████████████████████████\n" + "████████████████████▓▓▓▓▓▓▒░▓████████████████████████████████████████\n" + 
"██████████████████▓▓▓▓▓▓▒▒▓██████████████████████████████████████████\n" + "█████████████████▓▓▓▓▓▒▓█████████████████████████████████████████████\n" + "██████████████▓▓▓▒▒▒▓▓███████████████████████████████████████████████\n" + "█████████████▓▒▒▒▒▓██████████████████████████████████████████████████\n" + "███████████▓▓▓▒▓█████████████████████████████████████████████████████\n" + "█████████▓▓▓▓▓███████████████████████████████████████████████████████\n" + "██████▓▓▓▓███████████████████████████████████████████████████████████\n" + "█████████████████████████████████████████████████████████████████████" + "\nCom a adaga em mãos voce com toda a sua raiva e desejo de vinganca decide partir em busca de respostas\n" + "e sangue por parte dos agressores. Achava que o passado de lutas e tragédias tinha ficado para tras\n" + "mas novamente ele bate a minha porta, pensa voce. Está na hora de voltar a ser quem eu era.\n" + "Com esta frase dita em sussurros para si mesmo, voce pensa em seus antigos armamentos, escondidos\n" + "em um bau no porão da sua casa, voltando por todo o caminho de destruição já visto. "); } System.out.println("1-SAIR DO ALTAR SECRETO"); resp = e.nextInt(); switch (resp) { case 1: { System.out.println("Novamente dentro do templo voce observa os corpos, nada mais importa, somente a vinganca, uma ultima olhada\n" + "em seu mais fiel amigo nomeDoTiu, voce lembra de suas palavras e agora passando por toda a carnificina\n" + "encontra-se no meio do templo, encarando a saida e as chamas do lado posterior."); } } System.out.println("1-SAIR DO TEMPLO 2-SAIR DA VILA"); resp = e.nextInt(); if (resp == 1) { System.out.println("Apenas passando por toda a destruição voce segue seu caminho. nada mais importa, apenas a vinganca."); } if (resp == 2) { System.out.println("Caminhando com os passos pesados de furia voce dirige-se para sua velha casa, a pequena trilha que\n" + "voce sempre passou para visitar seus amigos agora so lhe tras as visões de seus corpos estirados ao chão\n" + "sem vida. Ao longe voce avista sua casa, mais lembrancas surgem, tudo ali de algma forma teve algum\n" + "envolvimento com essas pessoas que lhe acolheram tão bem, e agora já não existem mais. 
"); } System.out.println("" + "" + " \n" + " ░ \n" + " ░ ░▒ \n" + " ░░ ▒░ \n" + " ▒ ░ \n" + " ▓ ▒▒ \n" + " ▒█▓▓█▓ \n" + " ░▒░░░▓█ ░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒░░█▓ ░ \n" + " ░▒▓▓▓▓▓▓▓▒█▓ ▒ \n" + " ▒▓████████████▓░ ▒█▓░ \n" + " ░███▓▓▓▓███████████▓▒░░ ▒▒▓▓ \n" + " ▓█▓▓▓████▓█▓███▓███████████▓░ ▒▒▓▒ \n" + " ░█▓ ▒▓███▓▓▓█▓▓▓█▓███████████ ▒▒▓▒ \n" + " ░█▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███████▓ ░▒▓█▓███▓ \n" + " ░█▒ ▒▓▓▓▒▒░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓█▓████▓ \n" + " ░█░ ░▒▓█▓▓█▒ ▒█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████░ \n" + " ░█▒ ░░ ▒████▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ \n" + " █▒ ░░░▒▓▓██▒ ░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓ \n" + " ▓▓ ░░▒░░▒▓▓▓▓░░░░ ▒▓▓▓▓▓▓▓▓▓▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████ \n" + " ▓▓ ░▒▒▒░░▒░░░░░▒░░ ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓██ \n" + " ▓█ ▒▒▒▒░░░░░░░░▒▒▒▒░ ▓▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ██ \n" + " ▒█░ ░▒▒▒▒░ ░▒░░░ ▒▒▒▒░ ░▓▓▓▓▓▓▓▓▓▓▓███████▓▓▓▓▓▓▓▓▓▓▓██░ ░ ██ \n" + " ▓▒ ░░▒░░░▒▒▒▒▒▒▒▒▓░░░░░░ ░▓▓▓▓▓▓▓█████▓░░▒▓███▓▓▓▓▓████▓▒▒▒▒ █▓ \n" + " ░ ░░▒▒▒░░█▓▓██████▒ ▒▒░░░▒░▒ ▒███░░░ ░▒▒ ▒▓░████▓▒▒▓▓▒███▒▒░▒█ \n" + " ▒▓░░▒▒░▓▓▓▓▓▓▓▓█▒░▒░░▒░▓░ ░██▓ ░ ░██░ ░ ████ ░▓▒▓█▓▓▓▓ \n" + " ▒▓▒░░▒░▓▒▓▓▓▓███▒░░▒▓▒▒▓░ ░▓▓▓░░░░░██▒░ ░░██▓▓░░░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒▒░░░█▓▒▓▓████▒ ▒▒▒▒░▓░ ░▓▓▓░░▒▒░▓█░░░░░▒██▓▓░▒░▒▓▓▒▓▓▒▓▓ \n" + " ░▒▒▒▒░▒▒▓▒▓▓▓▓▓▒░░▒░▒▒▒▓▓▓█▓▓▒▒░░▒▒░░░░░░░░░░░▒▓▒▒░░▓▒▓▓▓▓▓█▒ \n" + " ░▓▒▓▓▒▓▓▒▓█▓▒▓▓▓▒░ ▒▒▒▓▒▓▓▓▓██▓▒ ▒▒▒░░░▒ ░░░▒▓▓▓▓███▓▓███████░ \n" + " ░▒▓▓▒▓▓▒▒▒░▒▒░░░▒▒▓▓▒▒▓▒▒▒▒░░▒▒▓▒░░▒▒░░░░░░▒█▓▒▒▒▒▒▒▓▒▓█▓▒▓▓▓████▓░ \n" + " ▓▓▓▒░▒▒░░▒▒░░░░ ░▒░▒▓▒▒▒▒▓▒▒ ░░▒▓▓▓▒▒▒░░░░░▓▓▓▒▒▓▒▒▒░▒▒▒▒░░ ▒░▒▓██▓ \n" + " ░▒▒░░▒▓▓▒▒▒▒▒░ ░░░▒▒░ ░▒▓▒░░░ ░▒▒▒▒░▒▓▓▒▒▒▒▒▓▒ ░░ ░ \n" + " ░ ░ " + "\nCaminhando com os passos pesados de furia voce dirige-se para sua velha casa, a pequena trilha que\n" + "voce sempre passou para visitar seus amigos agora so lhe tras as visões de seus corpos estirados ao chão\n" + "sem vida. Ao longe voce avista sua casa, mais lembrancas surgem, tudo ali de algma forma teve algum\n" + "envolvimento com essas pessoas que lhe acolheram tão bem, e agora já não existem mais. 
"); System.out.println("1-ENTRAR NA CASA"); resp = e.nextInt(); if (resp == 1) { Som.porta(); System.out.println("Adentrando a residencia voce dirige-se para o alcapao sob a mesa da sala.\n" + "Voce move a mesa, e vizualiza um alcapao trancado por um velho cadeado."); } System.out.println("2-DESTRANCAR"); resp = e.nextInt(); if (resp == 2) { Som.cadeado(); System.out.println("" + " \n" + " ░▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒▒▒░░░░░░░ \n" + " ░█▓▓▓▓▓▓▓▓█▒ ▓▓▓▓▓▓▓▓█▓██▒░▓▓██▓▓▓▓▓▓▓▓░\n" + " ░▓▓▓▓▓▓▒▓▒▓▒▒▓▓▒▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▒▓▒▓▓▒▒▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ░▓▓▓▓▓▓▒▓▒▓▒▒▓▓▒▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▒▓▓▓▓▒ \n" + " ░▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▓▓▒▓▓▒▒▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▒▓▓▓▒▒▓▓▓▒▒▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▒▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▒▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▒▓▒▒▓▒▓▒▒▓▒▓▒▓▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▒▓▒▓▓▒▓▓▒▓▒▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▒▒▓▓▒▓▒▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▓▒▓▒▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▒▓▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▒▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▒▒▓▒▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▒▓▓▓▒▓▒▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▒▓▓▓▓▒▒▓▓▓▒▓▓▓▓▓▒▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░░░░░░▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ░░▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░ ░ \n" + " ▒▒ ░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ▒▒ ░░░░░ ░ ░░░░ \n" + " ▒▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒░░░░░░░░░░░░░░░░░ \n" + " ▒▒ ▒▒▒░▒░▒░▒░░░▒░░░░░░░░░░░░░░░░░░░ ░▒▒░ \n" + " ▒▒ ▒▒▒░▒░▒░▒░░░▒▒▒░░░░░░░░ ░░ ░▒░ ░ \n" + " ▒▒ ░▒▒▒░▒▒▒░▒░▒░▒ ░░░░▒░ ▒▓▓▓▒ ▒▓█▒ \n" + " ▒░ ░▒▒▒▒▒░▒░▒░▒░░ ░▒░ ░▓███████▓▒░░░ \n" + " ▓░ ░▒▒▒▒▒▒▒░▒▒▒░ ▓█████▓▓▒░░ ░▒▒▓▓███ \n" + " ░▓ ░▒▒ ░░░▒░ ▒███▓█▓███████▓▓▒░ ▒░ \n" + " ░▓ ░▒░ ▒▓▒▒ ▓▓▓▓▓▓█▓█▓▓▓▓████████▓▓ \n" + " ░▓ ▒▒ ▓████████▓▓▒░ ░▒▒▓▓▓█▓█▓█▓███▓ \n" + " ▒▓ ▒▒ █████████████████▓▓▒▒░ ░▒▒▓▓█▓ \n" + " ░▒ ▒▒ ▒██████████████████████████▓▓▒▒░ ░ \n" + " ▒▒▒▒▒▓▓▓▓██████████████████████████ \n" + " ░░▒▒▒▓▓▓▓████████████▒ \n" + " ░░▒▒░ " + "\nUsando a chave pendurada em seu pescoço, voce abre o cadeado do alcapao"); } System.out.println("3-ABRIR"); resp = e.nextInt(); if (resp == 3) { System.out.println("Dentro ha um velho baú, dentro dele ha algo que voce jurou nunca mais usar\n" + "mas desta vez era diferente, as mesmas pessoas que o fizeram realizar este juramento\n" + "já não estao neste mundo, voce não sente qualquer remorso em quebrar esta promessa\n" + "e leva suas maos nele."); } System.out.println("4-ABRIR BAU"); resp = e.nextInt(); if (resp == 4) { Som.itemLendario(); System.out.println("Dentro estão fotos de seu passado que você não gostaria de ter revisto, e alguns itens lendarios."); } System.out.println(""); System.out.println("0-EQUIPAR ARMADURA"); resp = e.nextInt(); if (resp == 0) { System.out.println("" + "██████████████████████████████████████████████████████████████████\n" + "██████████████████████████████████████████████████████████████████\n" + "████████████████████████████████████████████▓▓▒▓██████████████████\n" + "█████████████▓▒▒▒▒▓▓▓▒░░░░▒▓████████████▓▓▒░ ░▒▒▒▓▓███████████\n" + "█████████▓▓▓▒▓▓▓▓█▒ ▒██████████▓▓░▒▒▒▓▒▒▓▒░░░ ░▓████████\n" + "███████▓▓██▒████▓ ░▒▒░ ░░ ▒███████▒▒▓░▓█▓█▒▓███▓▓▓▓▒▒░ ▓█████\n" + "██████▓▓██░▓██▓▓ ░▓█▒ ░ ░▓███▓▒▒▓▒▓███▓▒███████▓▓▓▓▒ ▒████\n" + "█████▓▒▓█▓▓█▓▒▓░ ▓▓▒░ ░▒░ ▒▓▓▒░░▒█░▓▓▓▓ ▒▓▓▓▓▓▓▒▒▓▓▓▒▒░ ▓███\n" + "████▓░░░░▒▒░░░█ ▒▒▒░▓░ ▒░ ▓▓░▒▒▒▒▒█░░▒▒░ ▒░░░░░░░▓▒▒▒▒▒▒░ ░▒█\n" + "███▓░▒▒░ ░ 
▒█ ▓▓▒░█░ ░░ ░▓▒ ░▓▓▒▒░█▒ ░░▒▒▓▓▓▓▓▓▓▓▒░ ░░░░ ▒█\n" + "████▒▒▒▒▒▒▒▒▒█▒░▓▓▓▒░▓▓▓▓▓▒▓▓▒▒▓▒▒▓▒ ▒██████▓▓▓▒░ ░▓▓███████\n" + "███████████▓▓▓░▓█▓▒▓▒▓▓▒▓▓▓▓▓▓█▓▓▓▓░ ░▒▒░ ▒▓█████████████\n" + "██████████████▒▓██████▓▒██████▓▓▒▓▒░ ▓░ ░ ▓██████████████\n" + "█████████████▓ ▒██████▓▒███████▓▒░ ░▒▓▓▓░ ░▒░ ▒██████████████\n" + "██████████████▒▓▒▓▓▓▓▓▒▓█▓▓█▓▒▓▒▒▒▓▓▓▓▒▒▒▓▓▒░▒▒▒▒ ▒███████████████\n" + "██████████████░▒▓▒░░░░ ▒▓▓▓▓▒░░▒██▓░▒▓▓▓░ ░▒▒▒░ ▒████████████████\n" + "██████████████▓ ▒▒ ░▒▒▒▓▓▓▒▒▒░▒▓▒▒████▓▓░▒░░ ░░░ ▓████████████████\n" + "███████████████▒ ▓▓▓▓▓▒▒▒▓▓▓▒▓▒░▒▓▓▒▒▒░▒▓▒▒▒▒▒ ▓████████████████\n" + "████████████████▓▒▓▒▓▓▓▓▒▒ ▒█▓▓▒░ ░▒▒▒▒░░░ ▒███████████████████\n" + "████████████████▓▒ ▒▒▒▓▒▒▒ ▒▓▓▓▒▒▒▒▒▒▒▒░ ▒███████████████████\n" + "█████████████████▓▓▒▓▓▓▒▒▓▒▒░ ░░▒░░░░░ ▒▓▓█████████████████████\n" + "███████████████████▒▓▓▓▒▒▒▒░▒░░░▒▓▒▒▒▒░░▒ ▓███████████████████████\n" + "██████████████████▓░░▒▒░ ░ ▒░░▒▒▓▒░▒▓ ░▒ ███████████████████████\n" + "██████████████████ ░█▓▓▓▓▓▓▒▒ ░░░░▒ ▒▒ ░░ ▓█████████████████████\n" + "████████████████▒▒░░▓▒▓▒▒▒▒▒▒▒░ ▓▓▓▓░▓▒░▓░░ █████████████████████\n" + "███████████████▓ ▒▓▓░░▒▒▒░░ ▓▒▒█░▒░ ▒░▒▓▒░░ ▓███████████████████\n" + "███████████████░ ░▓▒▓▒░░░ ▓▒▒▒░▒▒ ▒▒░▒▒░░░ ▓██████████████████\n" + "█████████████▓░ ░▓▒▒█▓▓▓▒▒▒░░▓▓░▓ ░▒░░▒░░▒▒░░░ ▓█████████████████\n" + "█████████████▓░▓▓▓▒▓▓░▓▓▒▒▒░░▓▓ ▓▒▓▒░░░░ ▒▒▒░░ ▓████████████████\n" + "█████████████▓▓▓▓▒▒▓█▓▓▒▒▒▒▒░▒▓ ▓▒▓▒░ ░▓▒▒░▒░ ████████████████\n" + "████████████▓▓▓▓▒▒▒██▓▓▒▒░░▒░▒█ ▓▒▒▒░░ ▒▒▒▒░░░ ███████████████\n" + "████████████▒▒▓▓░▒▓█▓▓▓▒▒░░░ ░█░▒▒▒▒░░▒░░ ▓▒▒ ░░░ ▒██████████████\n" + "███████████▓▒▒▒▓ ▓▓█░▒▓▓▒▒░ █▒▒▓░▒▒░░░░░ ▒▒▒▒░ ░ ██████████████\n" + "███████████▒░▒▓ ▓▓██▓▓▓▒░ ▓▓░▓▒▒▒░░░░░ ░▒░▒▒░ ▓█████████████\n" + "██████████▓░▒▓░▓▓▒▓███▓▓▒░░ ▒█ ░▓▓▒░░░░ ░▒██▓▒▒▒ ██████████████\n" + "██████████▒▒▒▒██▓ ▒▓█▓▓▓▒░ ░▒██░ ▒▒▒ ░▒▓█████▒ ▒██████████████\n" + "██████████▒▒▓█████▒░▓▓▓▒░▒▒▓▓▓░ ▓█▓▒▒░▒▓███████████████████████\n" + "███████████████████▓░▒▒▒▓▓▓▒▓░ ░▓██████▓█████████████████████████\n" + "█████████████████████▓░▒▒▒▓███████████████████████████████████████\n" + "██████████████████████████████████████████████████████████████████" + "\nSem orgulho algum por tudo o que estes apetrechos viram juntamente com seus olhos voce os equipa.\n" + "já sob o velho manto e armadura, com seu antigo arma no suporte em suas costas\n" + "o unico desejo ecoa por todo seu corpo… VINGANÇA!"); } System.out.println(""); Som.finalMario(); System.out.println("PARABENS! 
voce acaba de concluir a primeira fase do jogo!.\n" + "1-INICIAR ATO 2 2-IR PARA O MENU INICIAL 3-VERIFICAR STATUS"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Ato2 c = new Ato2(this.getJogador()); c.Enredo_2(); break; } if (resp == 2) { System.out.println("MOSTRAR STATUS"); break; } System.out.println("Numero invalido, tente novamente\n" + "1-INICIAR ATO 2 2-IR PARA O MENU INICIAL 3-VERIFICAR STATUS"); resp = e.nextInt(); } } public void inicializarJogador() { Especialidade arqueiro = new Arqueiro(); jogadorTeste = new Humano("Vagner", arqueiro); configuraDispensa(jogadorTeste.getClasseJogador()); } public void configuraDispensa(Especialidade classe) { dispensa = new Inventario(30); dispensa.adicionarItem(Itens.getCAMISA()); dispensa.adicionarItem(Itens.getJABUTICABA()); switch (classe.getDescricao()) { case "Guerreiro": dispensa.adicionarItem(Itens.ESPADA); break; case "Mago": dispensa.adicionarItem(Itens.CAJADO); break; case "Arqueiro": dispensa.adicionarItem(Itens.ARCO); break; default: break; } setDispensa(dispensa); } public Inventario getDispensa() { return dispensa; } public void setDispensa(Inventario dispensa) { this.dispensa = dispensa; } private void abrirInventario(Inventario inventario, String descricao) { descricao.toUpperCase(); System.out.println("------------------"); System.out.println("---" + descricao + "---"); System.out.println("------------------"); for (int i = 0; i < inventario.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + inventario.verItem(i).getDescricao()); } System.out.println("------------------"); System.out.println("Selecione uma ação: "); System.out.println("0 - Fechar"); System.out.println("1 - Guardar na mochila"); System.out.println("2 - Comer alimento"); System.out.println("3 - Mover para a mão"); Scanner selecionar = new Scanner(System.in); int acao = selecionar.nextInt(); if (acao == 1) { coletaDeItens(inventario); } else if (acao == 2) { System.out.println("Digite o número do alimento que deseja comer: "); Scanner alimento = new Scanner(System.in); int numAlimento = alimento.nextInt(); Item comida = inventario.pegarItem(numAlimento); try { jogadorTeste.comer((Comida) comida); System.out.println("Yummmm..."); System.out.println("Sua vida atual é: " + jogadorTeste.getbVidaAtual() + "/" + jogadorTeste.getbVida()); abrirInventario(inventario, descricao); } catch (Exception e) { System.out.println("Não foi possível comer o item."); inventario.adicionarItem(comida); abrirInventario(inventario, descricao); } } else if (acao == 3) { System.out.println("Digite o número do item que deseja mover para a mão: "); Scanner itemDeAtaque = new Scanner(System.in); int posicao = itemDeAtaque.nextInt(); System.out.println("Você tem certeza que deseja descartar o item atual para pegar o item " + inventario.verItem(posicao).getDescricao() + "(1 - Sim / 2 - Não)"); Scanner confirma = new Scanner(System.in); int resposta = confirma.nextInt(); if (resposta == 1) { try { Item itemAtaque = inventario.pegarItem(posicao); jogadorTeste.setItemDaMao((ItemDeCombate) itemAtaque); } catch (Exception e) { System.out.println("Erro ao pegar item. 
Verifique se é um item de ataque."); } } } } private void abrirMochila(Inventario inventario, String descricao) { System.out.println("------------------"); System.out.println("---" + descricao.toUpperCase() + "---"); System.out.println("------------------"); for (int i = 0; i < inventario.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + inventario.verItem(i).getDescricao()); } System.out.println("------------------"); System.out.println("Selecione uma ação: "); System.out.println("0 - Fechar"); System.out.println("1 - Comer item"); Scanner selecao = new Scanner(System.in); int acao1 = selecao.nextInt(); if (acao1 == 1) { System.out.println("Digite o número do alimento que deseja comer: "); Scanner alimento = new Scanner(System.in); int numAlimento = alimento.nextInt(); Item comida = inventario.pegarItem(numAlimento); try { jogadorTeste.comer((Comida) comida); System.out.println("Yummmm..."); System.out.println("Sua vida atual é: " + jogadorTeste.getbVidaAtual() + "/" + jogadorTeste.getbVida()); abrirMochila(inventario, descricao); } catch (Exception e) { System.out.println("Não foi possível comer o item."); inventario.adicionarItem(comida); abrirMochila(inventario, descricao); } } } private void coletaDeItens(Inventario dispensa) { int acao = 1; while (acao != 0) { for (int i = 0; i < dispensa.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + dispensa.verItem(i).getDescricao()); } System.out.println(" -- Digite o número do item que você deseja pegar:"); Scanner selecionar = new Scanner(System.in); int pegar = selecionar.nextInt(); try { Item itemPego = dispensa.pegarItem(pegar); jogadorTeste.adicionarItem(itemPego); System.out.println("Item adicioando à mochila!"); } catch (Exception e) { System.out.println("Esse item não pode ser pego!"); System.out.println("Motivo: " + e.getMessage()); } System.out.println("Deseja pegar outro item (1) ou fechar a mochila (0)?"); acao = selecionar.nextInt(); } } private Inventario getRecompensa(Inimigo inimigo, Especialidade classeJogador) { ItemAtaque item; Inventario recompensa = new Inventario(3); if (inimigo.getNome().equals("Goblin")) { switch (classeJogador.getDescricao()) { case "Guerreiro": item = (ItemAtaque) Itens.ESPADAMAGICA; break; case "Mago": item = (ItemAtaque) Itens.CAJADODEPAUS; break; case "Arqueiro": item = (ItemAtaque) Itens.ARCOARBALEST; break; default: return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", um espinafre " + (Itens.ESPINAFRE) + " e uma Armadura Azul" + (Itens.ARMADURAAZUL)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.ESPINAFRE); recompensa.adicionarItem(Itens.ARMADURAAZUL); return recompensa; } // FIM 1º LOOT if (inimigo.getNome().equals("Javali")) { switch (classeJogador.getDescricao()) { case "Guerreiro": item = (ItemAtaque) Itens.ESPADAJUSTICEIRA; break; case "Mago": item = (ItemAtaque) Itens.CAJADOESMERALDA; break; case "Arqueiro": item = (ItemAtaque) Itens.ARCOANTIGO; break; default: return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", uma coxinha " + (Itens.COXINHA) + " e uma Camisa Longa" + (Itens.CAMISALONGA)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.COXINHA); recompensa.adicionarItem(Itens.CAMISALONGA); } // FIM 2º LOOT if (inimigo.getNome().equals("Cervo")) { switch (classeJogador.getDescricao()) { case "Guerreiro": item = (ItemAtaque) Itens.ESPADAJUSTICEIRA; break; case "Mago": item = 
(ItemAtaque) Itens.CAJADOESMERALDA; break; case "Arqueiro": item = (ItemAtaque) Itens.ARCOANTIGO; break; default: return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", uma coxinha " + (Itens.COXINHA) + " e uma Camisa Longa" + (Itens.CAMISALONGA)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.COXINHA); recompensa.adicionarItem(Itens.CAMISALONGA); } // FIM 3º LOOT return recompensa; } public Raca getJogador() { return jogador; } public void setJogador(Raca jogador) { this.jogador = jogador; } }
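// The menus above all follow the same pattern: print the numbered options, read an int with
// Scanner.nextInt() and loop until one of the listed numbers is typed. Because the loop
// condition is written as `resp != 1 || resp != 2` (which is always true), each loop only
// exits through the break statements inside it. Below is a minimal sketch of how that
// "ask until a valid option is chosen" behaviour can be expressed directly; the MenuInput
// class and the lerOpcao method are illustrative names that do not exist in this project,
// and a single shared Scanner over System.in is assumed.
import java.util.Arrays;
import java.util.Scanner;

final class MenuInput {

    private static final Scanner ENTRADA = new Scanner(System.in);

    private MenuInput() {
    }

    /**
     * Prints the prompt and keeps reading integers until one of the accepted
     * options is typed, echoing "Numero invalido, tente novamente" otherwise.
     */
    static int lerOpcao(String prompt, int... opcoes) {
        System.out.println(prompt);
        while (true) {
            int resp = ENTRADA.nextInt();
            if (Arrays.stream(opcoes).anyMatch(o -> o == resp)) {
                return resp;
            }
            System.out.println("Numero invalido, tente novamente\n" + prompt);
        }
    }
}

// Hypothetical usage for one of the two-way choices in fase2():
//   int resp = MenuInput.lerOpcao("1-ATACAR O GLOBIN 2-FUGIR", 1, 2);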
src/atos/Ato1.java
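/*
 * Ato 1 of the text adventure. The main method builds the player by asking for a name,
 * a speciality (1-Arqueiro, 2-Guerreiro, 3-Mago) and a race (1-Anao, 2-Elfo, 3-Humano,
 * 4-Orc, 5-Undead) via Ato0.criarJogador, then opens the starting pantry inventory
 * ("dispensa"). Enredo_1() narrates the premise (the burned village and the caravan hunting
 * artefacts) and fase_1() walks the player through the opening scenes, reading every choice
 * as a numbered option from System.in and illustrating each scene with ASCII art.
 */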
package atos; import ItemJogo.Comida; import audio.Som; import static audio.Som.parar; import java.util.Scanner; import sun.audio.AudioStream; import ItemJogo.Inventario; import ItemJogo.Item; import ItemJogo.ItemAtaque; import ItemJogo.ItemDeCombate; import ItemJogo.Itens; import inimigos.AnaoIA; import java.util.ArrayList; import java.util.Scanner; import motor.EnumEspecialidades; import motor.EnumRacas; import raca.Anao; import raca.Elfo; import raca.Humano; import rpii.Arqueiro; import rpii.Especialidade; import rpii.Guerreiro; import rpii.Raca; public class Ato1 { private Raca jogador; private Raca jogadorTeste; private Inventario dispensa; private String nome; public static void main(String[] args) { Ato0 a0 = new Ato0(); Scanner input = new Scanner(System.in); System.out.println("Olá jogador, qual o seu nome?"); String nome = input.next(); System.out.println(nome + " Qual especialidade você quer ter?"); System.out.println(" 1 - Arqueiro - "); System.out.println(" 2 - Guerreiro - "); System.out.println(" 3 - Mago - "); int especialidade; especialidade = input.nextInt(); System.out.println("Então " + nome + " qual raça você gostaria de ser?"); System.out.println("1 - Anão - "); System.out.println("2 - Elfo - "); System.out.println("3 - Humano - "); System.out.println("4 - Orc - "); System.out.println("5 - Undead - "); int raca; raca = input.nextInt(); Ato1 as = new Ato1(a0.criarJogador(nome, EnumEspecialidades.values()[especialidade], EnumRacas.values()[raca])); as.abrirInventario(as.getDispensa(), "dispensa"); } public Ato1(Raca jogador) { this.jogador = jogador; inicializarJogador(); // Ato0 atinho = new Ato0(); // this.jogador = atinho.criarJogador(); // abrirInventario(getDispensa(), "dispensa"); } public long Enredo_1() throws Exception { Som.fase1(); System.out.println(""); System.out.println(jogador.getNome() + ", vive numa pequena vila hermitao, se exilou, passado obscuro voltando de uma cacada, avista fumaca vindo da vila proxima \n" + "onde estao todos os seus amigos, encontra um sobrevivente nos escombros, que lhe conta quem destruiu, vagamente, e morre() \n" + "foi a caravana... disse o sobrevivente usando toda sua energia que o mantinha vivo " + jogador.getNome() + " se lembra que eles estão buscando artefatos\n" + "para reviver inimigo pica que tem poderes de mandar em criaturas e esse inimigo so pode ser derrotado com armas imbuídas com uma \n" + "magia perdida dos antigos contos de taodistante agora, precisa achar pedacos de adamantium pra montar ritual de criacao dessas \n" + "armas sagradas! furioso, " + jogador.getNome() + " vai para sua casa se preparar para a sua jornada em busca de adamantium para conseguir sua vinganca\n" + " Mas tome CUIDADO! pois o caminho possui altos exercitos e alguns lideres q comandam a hierarquia da caravana"); fase_1(); return 0; } public long fase_1() throws Exception { Scanner e = new Scanner(System.in); System.out.println("." 
+ ".#####...######..##...##..........##..##..######..##..##..#####....####..\n" + ".##..##..##......###.###..........##..##....##....###.##..##..##..##..##.\n" + ".#####...####....##.#.##..........##..##....##....##.###..##..##..##..##.\n" + ".##..##..##......##...##...........####.....##....##..##..##..##..##..##.\n" + ".#####...######..##...##............##....######..##..##..#####....####..\n" + "........................................................................."); System.out.println("Pois bem, então " + jogador.getNome() + " parte em sua longa jornada.\n" + "Nos arredores da vila Kenko, você descança, sem abrir os olhos voce pensa: acordo ou durmo mais um pouco?\n" + "1-SEGUIR DORMINDO 2-ACORDAR"); int resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.galinha(); System.out.println("As galinhas cacarejam enlouquecidamente anunciando o novo dia, por vezes você sente vontade de dar cabo delas \n" + "devido a irritação sonora recorrente. Agora desperta por completo."); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SEGUIR DORMINDO 2-ACORDAR"); resp = e.nextInt(); } System.out.println("Abrindo os olhos, ainda meio confuso, percebe que esqueceu de apagar a vela, e agora o cheiro de cera invade o recinto.\n" + "░▒░░░░░░░░░░░░░▒░░░░░░░▒░░░░░░░░░░░░░░\n" + "░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒░▒░▒░▒░▒░▒░▒░▒░▒░▒░░░▒░░░░░░░░\n" + "░▒░░▒░▒░▒░▒░▒░▒░▒▒▒▒▒░▒░▒▒▒░▒░▒░░░░░░░\n" + "░▒░▒▒▒░▒░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒▒▒░▒░░░▒\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒░▒░░░░▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░░░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓███▓▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░▓████░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒░█████░▒▒▒▒▒▒▒░▒░▒▒▒░▒\n" + "░▒▒░░░▒░▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒░▒░▒▒▒░░██▓▓▓░░▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒░▒▒▒▒▒▒▒▒░░██▓▓▓░▒▒▒▒▒▒▒▒▒▒▒░░░▒\n" + "░▒░░▒░▒░▒░▒░▒▒▒░░██▓▓▒░▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░░░▒░▒▒▒░▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒░░░░▒░▒░▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒░▒▒▒▒▒░▒▒\n" + "░▒░▒░▒▒▒░▒▒▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒░▒░▒░▒▒▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░░░░░░░▒░░▒░░██▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░░░▒▒▒▒▒▒▒▒▒▒▒██▓▓▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒\n" + " ░░░░▒▒▒▒░░▒▒▒▒▒░░░░░░░░░░░░ ░░░░░\n" + "░░░░░░▒▒▒░ ░▒▒▒▒░░░░\n" + "▓█████▓▓▒▒▒ ▒▒▒▒▓▓████\n" + "██████████████████████████████████████" + "\n1-APAGAR VELA 2-IR AO BANHEIRO"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("\n" + "░▒░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░▓████░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒▒▒▒▒▒▒▒▒▒▒░█████░▒▒▒▒▒▒▒░▒░▒▒▒░▒\n" + "░▒▒░░░▒░▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒░▒░░▒\n" + "░▒░░░▒░▒▒▒▒▒▒▒▒▒░████▓░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░▒░▒▒▒░▒░▒▒▒░░██▓▓▓░░▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░▒░▒▒▒▒▒▒▒▒░░██▓▓▓░▒▒▒▒▒▒▒▒▒▒▒░░░▒\n" + "░▒░░▒░▒░▒░▒░▒▒▒░░██▓▓▒░▒▒▒▒▒▒▒▒░▒░▒░░▒\n" + "░▒░░░▒░▒▒▒░▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒░░░░▒░▒░▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒░▒▒▒▒▒░▒▒\n" + "░▒░▒░▒▒▒░▒▒▒▒▒▒▒░██▓▓▒░▒▒▒▒▒▒▒▒▒░▒░▒░▒\n" + "░▒▒░▒░▒░▒▒▒▒▒▒▒░░▓█▓▓▒░▒▒▒▒▒▒▒▒▒▒░▒░▒▒\n" + "░▒░▒░░░░░░░▒░░▒░░██▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒░▒\n" + "░▒░░░░▒▒▒▒▒▒▒▒▒▒▒██▓▓▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒\n" + " ░░░░▒▒▒▒░░▒▒▒▒▒░░░░░░░░░░░░ ░░░░░\n" + "░░░░░░▒▒▒░ ░▒▒▒▒░░░░\n" + "▓█████▓▓▒▒▒ ▒▒▒▒▓▓████\n" + 
"██████████████████████████████████████" + "\nVela apagada!\n "); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-APAGAR VELA 2-IR AO BANHEIRO"); resp = e.nextInt(); } System.out.println(jogador.getNome() + " vai até o lavabo atirar agua em sua face para dissipar a preguiça ainda remanescente, a agua está\n" + "um arrepio percorre todo o seu corpo, você encara-se no velho espelho e contempla sua triste feição\n" + "trazendo flashes das lembranças de seu passado de mercenário, tempos de matança em prol do atual reino.\n" + "Voltando a si, ainda contemplando sua face no reflexo:\n"); /* + "Qual será sua classe?\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp == 3 || resp == 4 || resp == 5) { if (resp == 1) { break; } if (resp == 2) { break; } if (resp == 3) { break; } if (resp == 4) { break; } if (resp == 5) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); } System.out.println("1-ESCOLHER NOVAMENTE 2-IR PARA O QUARTO"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Qual será sua classe?\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp == 3 || resp == 4 || resp == 5) { if (resp == 1) { break; } if (resp == 2) { break; } if (resp == 3) { break; } if (resp == 4) { break; } if (resp == 5) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-HUMANO 2-ELFO 3-ORC 4-ANAO 5-UNDEAD"); resp = e.nextInt(); } break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-ESCOLHER NOVAMENTE 2-IR PARA O QUARTO"); resp = e.nextInt(); }*/ System.out.println("Então você dirige-se para o quarto para equipar-se\n" + "Hora de comer alguma coisa\n" + "1-IR PARA A DISPENSA 2-CAÇAR SUA COMIDA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { abrirInventario(getDispensa(), "dispensa"); System.out.println(jogador + " percebendo a pouca quantia de alimento em sua dispensa decide ir caçar na floresta"); System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA A DISPENSA 2-CAÇAR SUA COMIDA"); resp = e.nextInt(); } System.out.println("Olhando para perto da porta da frente, encontra sua arma\n" + "1-SAIR DA CASA 2-PROCURAR POR COMIDA DENTRO DA CASA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("\n" + " ░▒▒▓▓▓▓▓▓▓▒▒░ \n" + " ▒▓▓███████▓▓▓▓▓▓▓███████▓▓▒░ \n" + " ▒▒▓▓███▓▓▒▒▒▒▒▒▒▒▓▓▓▓▒▒▓▒▓▓▓▓██████▓▒ \n" + " ▒▓██▓▒░▒░▓▓██████████████████████▓▓▓▒▒▒███▒ \n" + " ▒███▓░░▓███▓█████████████████████████████▓▓▓▓██▓▒ \n" + " ▒███░░▓██▓███▓▓█████▓█████████████████████████▓▓▓███▓ \n" + " ███▒░▒▓███▓███▓▓█████▓███████████████████████████▓▒▒███░ \n" + " ███░░██▒███▓▓███▓█████▓█████████████████████████████▒░███▒ \n" + " ███▒▒█▒██▒███▓███▒█████▓█████████████████████████████▓▓▒███░ \n" + " ▓███▒█▓▓▓█▓▓██▓▓██▓█████▓▓█████████████████████████▓█▓██░████ \n" + " ▒▓▒▓▒ ░▓▓▓▓▓█▒██▓▓██▓▓▓▓▓▓▓▓█████████████████████████▓█▓█▓░▒▒▓▓ \n" + " ▒▓▓ ███░▓▓▓▓▓██▒██▓▓▓▓▓█████▓████████████████████████▓▓▓█▓▒ ▒██▓ ▒ \n" + " ░█▓▓ ▓██▒ ▓▓▓▓▒█▓▓▓▓▓██▓██████████████████████████████▓█▓▓▒ ░███░▒▓▒ \n" + " ▒▓▓█▒ ███░ ▒▓▓▓▒▓▒██▓███▓███████████████████████████▓▓█▓▓▓░ ███▒░▓▒▓░ \n" + " ▒█▓▓█▒ ▓██▒ ░▒▓▒▒█▓██▓███▓▓░ 
░▒░▓▓▓▓████████████████▓▓█▓░ ███░▒▓▓▓▓▒ \n" + " ▒▓▓▓█▓█▓ ▒███▒ ░▒▓▓█▓██▓▒ ▒▒▒░ ░ ░ ░▒▒▓▓██████████▓▒▒▓██▓░▓▓█▓▓█▓ \n" + " ░ █▒▓█▓▓███▒░▒███▒░▒▒▒▓▓▓█▓▓▓███████████████▓▓████▓██▓▓▓▒▒▓██▒░▒▒██▒█▓▓▒░ ░ \n" + " ▒░▒▓█▓▒▓██████▒░▒▓███▓▒░░███████████████████████████▓▓▓▓█▓▓▓░▒▓██▓▓██▓█▒▓▓ ▒ \n" + " ▓░▓▓█▓▓▓███▓▓████▓▓▓▓▓▓▓▓▒▒▒▒▒ ░▒▒░░▒▒▒▓▓▓▓▓▓▓▓▓▓█▓▓▒▓▓▒▒▒▒▓▓▓████▓▓██░█▓█▒▒▒ \n" + " ░▓░▓█▓▓█▓▒▓▓▓████████▓▓▓▒▒▒▒▒▒▒░░▒░░░░ ░░░░ ░░▒▒▓████▒▓████▓▒▓▓▓█▓▓░▓ \n" + " ▓▒▒▓█▓████▒▒▓██████████████▓▓▒▒░░░ ░░▒▒▓▓███████████▒███▓▒░██▓█▓▒█▓░\n" + " █▓▓ ▓████▓██▓▒▓▓█▓█████████████████████████████████████▓███████▓▓▓▒▓█▓▓█▓▒█░█▒▓\n" + " █▓▓▒ ███▓▓████▓▓▒▒▓▓███████████████████████████████████▓▓████▓▓▒▒▓████▒▓██░▓▓▓▒\n" + " ░██▓▒ ▓█▓████████▓▓▓▒▓▓▓██████████████████▓█████████████▓▓▓▓▒▓▓█▓▓████▓▓█░▓▓░█░\n" + " ██▓▓▓▒░█████████████▓▓▓▓▒▓▓▓▓▓▓██████████▓██████▓▓▓▓▓▓▓▒▓▓██████▒█████▒░▓▒▓█▓ \n" + " ▓██▓▓▓▒▓████▓███████████▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓█▓▓███████▓▓██▓░▒▓▓▓▓█ \n" + " ▓██▓▓▓▒▒▓▓▓▓██████████████████████▓▓▓▓▓▓▓█████████████▓████████▓▓▒▒▒▓▒███ \n" + " ░▓██▓▓▓▓▓▓▓███████████████████████████▓██████████████▓██████▓▒▒▒▒▓▓███▓ \n" + " ▒████▓▓▓▓▓▓▓▓▓██████████████████████▓██████████████▓▓█▓▒▒▒▒▒▓▓████▓ \n" + " ░▓████▓▓▓▓▓▓▓▓▓▓▓█████████████████▓███████████▓█▓▓▒▒▒▒▒▓▓█████▒ \n" + " ░▓██████▓▓▒▓▒▒▒▓▓▓▓▓▓██████████▓███▓▓▓▒▒▒▓▓▒▒▒▒▓▓▓██████▒ \n" + " ▒▓███████▓▓▓▒▒░░▒▒░░░░░░▒▒▒░░░▒▒▒▒░▒▒▒▒▓▓███████▓▒ \n" + " ░▒▓█████████▓▓▓▓▒▒▒▒▒▒▒▓▒▓▓▓▓██████████▒▒░ \n" + " ░▒▒▓▓▓████████████████▓▓▓▒▒░ "); System.out.println("Voce nao encontra nada, sua barriga esta roncando e sua saude diminuindo, entao sai da casa."); break; } if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SAIR DA CASA 2-PROCURAR POR COMIDA DENTRO DA CASA"); resp = e.nextInt(); } Som.porta(); System.out.println("\n" + " \n" + " ░ \n" + " ░ ░▒ \n" + " ░░ ▒░ \n" + " ▒ ░ \n" + " ▓ ▒▒ \n" + " ▒█▓▓█▓ \n" + " ░▒░░░▓█ ░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒░░█▓ ░ \n" + " ░▒▓▓▓▓▓▓▓▒█▓ ▒ \n" + " ▒▓████████████▓░ ▒█▓░ \n" + " ░███▓▓▓▓███████████▓▒░░ ▒▒▓▓ \n" + " ▓█▓▓▓████▓█▓███▓███████████▓░ ▒▒▓▒ \n" + " ░█▓ ▒▓███▓▓▓█▓▓▓█▓███████████ ▒▒▓▒ \n" + " ░█▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███████▓ ░▒▓█▓███▓ \n" + " ░█▒ ▒▓▓▓▒▒░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓█▓████▓ \n" + " ░█░ ░▒▓█▓▓█▒ ▒█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████░ \n" + " ░█▒ ░░ ▒████▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ \n" + " █▒ ░░░▒▓▓██▒ ░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓ \n" + " ▓▓ ░░▒░░▒▓▓▓▓░░░░ ▒▓▓▓▓▓▓▓▓▓▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████ \n" + " ▓▓ ░▒▒▒░░▒░░░░░▒░░ ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓██ \n" + " ▓█ ▒▒▒▒░░░░░░░░▒▒▒▒░ ▓▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ██ \n" + " ▒█░ ░▒▒▒▒░ ░▒░░░ ▒▒▒▒░ ░▓▓▓▓▓▓▓▓▓▓▓███████▓▓▓▓▓▓▓▓▓▓▓██░ ░ ██ \n" + " ▓▒ ░░▒░░░▒▒▒▒▒▒▒▒▓░░░░░░ ░▓▓▓▓▓▓▓█████▓░░▒▓███▓▓▓▓▓████▓▒▒▒▒ █▓ \n" + " ░ ░░▒▒▒░░█▓▓██████▒ ▒▒░░░▒░▒ ▒███░░░ ░▒▒ ▒▓░████▓▒▒▓▓▒███▒▒░▒█ \n" + " ▒▓░░▒▒░▓▓▓▓▓▓▓▓█▒░▒░░▒░▓░ ░██▓ ░ ░██░ ░ ████ ░▓▒▓█▓▓▓▓ \n" + " ▒▓▒░░▒░▓▒▓▓▓▓███▒░░▒▓▒▒▓░ ░▓▓▓░░░░░██▒░ ░░██▓▓░░░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒▒░░░█▓▒▓▓████▒ ▒▒▒▒░▓░ ░▓▓▓░░▒▒░▓█░░░░░▒██▓▓░▒░▒▓▓▒▓▓▒▓▓ \n" + " ░▒▒▒▒░▒▒▓▒▓▓▓▓▓▒░░▒░▒▒▒▓▓▓█▓▓▒▒░░▒▒░░░░░░░░░░░▒▓▒▒░░▓▒▓▓▓▓▓█▒ \n" + " ░▓▒▓▓▒▓▓▒▓█▓▒▓▓▓▒░ ▒▒▒▓▒▓▓▓▓██▓▒ ▒▒▒░░░▒ ░░░▒▓▓▓▓███▓▓███████░ \n" + " ░▒▓▓▒▓▓▒▒▒░▒▒░░░▒▒▓▓▒▒▓▒▒▒▒░░▒▒▓▒░░▒▒░░░░░░▒█▓▒▒▒▒▒▒▓▒▓█▓▒▓▓▓████▓░ \n" + " ▓▓▓▒░▒▒░░▒▒░░░░ ░▒░▒▓▒▒▒▒▓▒▒ ░░▒▓▓▓▒▒▒░░░░░▓▓▓▒▒▓▒▒▒░▒▒▒▒░░ ▒░▒▓██▓ \n" + " ░▒▒░░▒▓▓▒▒▒▒▒░ ░░░▒▒░ ░▒▓▒░░░ ░▒▒▒▒░▒▓▓▒▒▒▒▒▓▒ ░░ ░ \n" + " ░ ░ " + "\nAo sair, vislumbra seu pequeno casebre de madeira cedido pelo ancião da vila após voce salva-lo em certa ocasião\n" + "este imóvel é sua morada a alguns 
anos, sempre solitariamente acolhendo seus pensamentos e seu corpo cansado.\n" + "Virando-se para a densa floresta de Lavitan\n" + "1-SENTAR UM POUCO 2-ENTRAR NA FLORESTA"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println(jogador.getNome() + " recupera um pouco de sua saude durante seu descanso, depois disso adentra a floresta de Levitan."); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-SENTAR UM POUCO 2-ENTRAR NA FLORESTA"); resp = e.nextInt(); } Som.entradaFloresta(); System.out.println("\n" + "█████████████████████████████████▓ █████████████████████████████████████████████\n" + "████████████████████████████████▓▓▓▓████████████████████████████████████████████\n" + "███████████████████████████████▒▒▒▒▒▒▒███████████████████████████▓██████████████\n" + "██████████████▒░█████████████▓▒▒▒▒▒▒▒▒▒▓████████████████████████▒▒▒█████████████\n" + "████████████▓▒▒▒▒▓██████████▒▒▒▒▒▒▒▒▒▒▒▒▒▓███████▓█████████████▓▓▓▒▒▓███████████\n" + "███████████▒▒▒▒▒▒▓▓███████▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒█████▓▒▒▓██████████▒▓▓▒▒▓▒▓██████████\n" + "██████████▒▒▒▒▒▒▒▒▓▒▓███▒░░░░▒░░░▒▒▒▒▒░▒░▒░ ░██▓▒▓▒▒▒████████▒▓▒▒▒▒▒▒▒▒█████████\n" + "████████▓▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▓▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒█▓▓▓▒▒▒▒▒▒▒▓████▓▒▒▒▒▒▒▒▒▒▒▒▒▒███████\n" + "██████▓▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▓█▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓█ ░▒░▒▒░▒▒▒░▒▒░ ░▓████\n" + "█████░░░▒▒▒▒▒░▒▒▒░▒▒▒░░░▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒████\n" + "█████▓▓▓▒▒▒▒▒▒▒▒▒▒▒▓▒ ░▒▓▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░ ▒░▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓██████\n" + "████▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒░▒▒▒▒▒░▒▒░▒▓▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░████\n" + "██░ ░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓ ░▓▓▒▒░▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓▓▒▒▒▓▒▒▒░░▒▒░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒███\n" + "██▓███▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒░ ▒▒▒▒▒░▒▒▒▒▒░░▒▒▒▒░ ▒▒▒▒░▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓████\n" + "████▓░▒░▒░░▒▒▒▒▒▒▒░░▒░░░░▓▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒ ░░░░▒ ░▒▓▒░▓▒░▒░░░▒▒▒▒▒▒▒░▒░▒▒▒ ▒▓█\n" + "███▓░▒░░▒▒▒░░▒▒▒░░▒▒▓▓░▒▓▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▓▒ ▒▒▒▒▓▒▒░ ░ ▒▒▒▒▒▒░▒▒▒░░▒▒▒▒ ░▒▒░ ▒\n" + "█████▓▓▒▒▒▒▒▒▒░▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒░░▒▒▒▒░▒▒▒▓▓▒▒▒▒▒▒▒░░▒▒▒▒▒▒▒░▓████\n" + "███▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒░░▒▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░███\n" + "██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒░░ ▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓ ░▒▒ ▒▒▒▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░ ░█\n" + "▓ ░░▒░▓▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ▒▒▒▓░ ▒▒░ ▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓░░█▓▒░▒\n" + "▓▓███▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓▒▓░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓ ░▒▒▓▒▒▒▓▒░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ █████\n" + "████▒▓▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒░▒▓▒▒▒▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ▒▒▒▓▓▒▒ ▓▓▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒ ▓███\n" + "███▓▓▓▒▒░░░░░░░▒▒▒▒▒░░█▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▓░ ░░░▒░ ▒▓░▒▒▒▒░ ░░░░░░▒▒▒▒▒▒▒ ▓██\n" + "██▓░▒░░ ░░▒ ░ ▓▒░░░░░░▒ ░░░ ░ ▒░░░░░▒▓ ░▓░░░░░▒░ ░ ░ ░░░░░░▒▒ ██\n" + "██░░ ░▒▓▓▒▒ ░▒▓▒░ ▒▓ ░ █▓░ ▓▓▓▒░ ░░▒░░▒ ▒ ▒▒░ ░░░▒▒▒▒░ ▒▒▓▓░ ▓▒▒░░▒▒░ ▓█\n" + "██▒▓█████▓ ▒▓▓▓▓ ███████████ ░▓▓▓▓▓ ████████▒ ▒▓▓▒ ███████▒ ░▒▓▓▓░▓████████▓██\n" + "██████▓▓███ ░▒▓▓▓ ████████████▒ ▒▒▒▓▒ ████▓████ ▓▓▓▒▒█████████ ▒▓▓▓░░████▓██████\n" + "▓████▓▒▒▓██ ▒▒▒▒▒ ▓▒▓████▓▓░██▓ ▓▒▒▒▒░▓█▓▒▒▓███ ▓▒▒░▒██▓ ██▓██ ░▒▒▓░▓▒▓██ ▒█████\n" + "▒ ▒█▓▒▒░▒▒▒▒▒▓▒▒▒▒▒ ▒▓▓▓▒░▓▒▓▓▒▒▒▓▒▒▓▓▒▒▒▒▒▒▓█ ▒░▒▒▒▓▒▒ ▓▒ ░▓ ▒▒▒▒▒▓▒▒▓▒▒▒▒▓▓▒▒\n" + "██░░▒▒░▒░▒▒▒▒▒▒▒▒▒▒░ ░▒░░░▒▒░░▒░▒░▒▓▒░▒▒▒░░▒▒▒░ ▒▒▓▒▒▒▒░▒▒▒▒▒▒▓▒ ▒▒▓▒░░░▒▒░▒▒ ▒█"); System.out.println("\nvoce caminha calmamente por uma pequena trilha, ela mal pode ser vista pois a vegetação muito densa\n" + "mas sua notável experiência ela se torna apenas uma trilha normal, seguindo mais adiante voce depara-se com\n" + "uma bifurcação.\n" + "Ao norte leva para uma grande elevação, a leste, a segunda trilha entra em um terreno mais baixo e umido.\n\n" + " \n" + " ▒▒░ 
\n" + " ░▒▒▒ \n" + " ░▒▓▒ ▒▒▒░░░ \n" + " ░▒▓▒ ░▒▒▒▓▓▓▒▓▒▓▓▓░\n" + " ░▒▒▒ ░▒▒▒░▓▒▒▒ ▒▒▓▒ \n" + " ░░▒▒▒░░ ░▒▓▒ ░▒▒▒▒ ░░ ░▒▒▓▓▒ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░ ▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒░ \n" + " ▒▒░░░ ░░░▒▒▒▒▒▒▒░░░ ▒░▒▒▒░░░░░░▒▒▒▒░ \n" + " ░▒░ ░▒░▒▒░▒▒░░░░░▒▒▒ ░▒▒▒▒▒▒▒▒░░░░ \n" + " ░░░▒▒▒░░░░░░░▒▒▒▒▒▒░░▒▒▒ ▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░░▒▒▒▒▒▒▒▒▒▒ ░▒▒░ ▒░░░ ▒▒▒▒▒▒▒▒▒░ \n" + " ░▒▒▒▒▒▒░░▒▒▒▒▒░░░▒▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒░ ░ ░ ▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒ \n" + " ▒▒▒▒░▒░ ▒▒▒▒▒░░ ▒▒▓░ ░░▒▒▒▒▒▒▒▒▒░ \n" + " ░▒▒▒▒▒▒▒▒▒▒░ ░▒▒▓▒ ░░▒ \n" + " ░░░ ░▒▒▓▒ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ░▒▒▓ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▓ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ░▒▒▒▓▒ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒ ▒▒▓ \n" + " ░█▒█▒ ░▒▒ \n" + " ▓▓██▓ ▒▒▒ ▒▒▒░ \n" + " ▒▓▓▓█▒ ▒░░▓▓█▓▒ \n" + " ░▒▒▓▓▓ ▒▒████████ \n" + " ▓▓▓▓██ ░▒▒▓███████ \n" + " ▒█▓▓▒ ▒▓▒ "); System.out.println("1-IR PARA O LESTE 2-IR PARA O NORTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { irLeste(); break; } if (resp == 2) { irNorte(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA O LESTE 2-IR PARA O NORTE"); resp = e.nextInt(); } System.out.println(""); System.out.println("PARABENS! voce acaba de concluir a primeira fase do Ato1.\n" + "1-INICIAR FASE 2"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); fase2(); break; } if (resp == 2) { System.out.println("Mochila!"); abrirMochila(jogadorTeste.getInventario(), "mochila"); System.out.println("MOSTRAR STATUS"); break; } System.out.println("Numero invalido, tente novamente\n" + "1-INICIAR ATO 2 2-VERIFICAR STATUS"); resp = e.nextInt(); } System.gc(); return 0; } public long irLeste() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("Seguindo adiante neste caminho a vegetação diminui de tamanho, mas nao perdendo em densidade por parte das pequenas vegetações\n" + "voce sente em seus passos que o solo esta ficando cada vez mais umido e mole por certas vezes seus pes afundam na lama\n" + "caracteristica de terrenos alagados.\n" + "" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░░ ░░ ░▒▒▒▒ ░░░░░ \n" + " ░░░░░░▒░▒▒░▒▒▒▒▒▒▒▒░░░░░░ \n" + " ░▒▒░░▒▒▒▒░░▒▒▒▒░▒░▒░▒▒░░░░░ \n" + " ▒▒░ ░▓▒░▒▒▒░▒▒░▒▒▒▒▒▒░░▒▒▒░▒▒░ \n" + " ░▒▒▒▒▒░▒▒▒░ ▒▒░▒░▒▒▒░░░░░▒▒▒▒▒▒▒░░░▒▒▓▒ \n" + " ░▒▒▒▓▒▓▒▒░ ▒▒▒▒░▒▒▒▒░░░░░▒▒▒▒▒▒░░░░░▒▒▒▒▒ ░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒░▒▒▒▓▒▒░▒▒░▒▒▒░▓▓░▒░░░░░▒▒▒▒▒▒▒░▒░ \n" + " ▒▓▒▓▒░▒▒▒░░▒▒▒▒░░▒░░▒▒░░▒▒▒▒▒▒░▒░▒▒▓▒▒▒░░▒▒▓▒▓▓▒▒▒▒▒▒▒░░ \n" + " ░▒▒░▒▒░▒▓▓░▒▒░▒▓▒▒░▒▒░▒▒░▒▒▒░░▒▒░▒▒▒▒▓▓▒▒▒░▒▒▒▓▒▒▓▒▒▓▒▒▒▒▒▒ \n" + " ▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▓▒▒░░░▒▓▒▒▒▒░░▒░░▒▒▒▒▒░░▒░▒▒▒▒▒▒▒▓▓▓▒▒▓▓▒▒▒▒▓▓░ \n" + " ░▒▒▓▓▓▓▓▒░▒▒▒▒░▒▒▒▒▒░░░▒▒▓▒░▒░░▒▒░▒▒▒░▒▒▓▒▒▒▒▒▒▒▒ ▓▓▓▒░▒▓▒▒▒▒▓▓▒ \n" + " ▒▒▒▒▒ ░░░░ ░▒▒▒▒▒▒▓▓▒▒▓▓░▒▒░▒▒▒▒░▒▒░ ▒▒░▓▓▓▒▒▒▒░▒▓▒▒▒▒▒▒░░▒▒▓▓ \n" + " ░▒▒▒░░░▒░░▒▒▒▒▒▓▒▒▓▓▒▒▓▓▓█▓▓▓▓▓▒▒▒░░▒░░░▒▒░ ▒▒░▒▓▒░░▓▒▒░▒▒▒░▒▓▒░▒▒▒▓▓ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓▓█▓▓▒▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒░░▒░▒░▒▒░░▒▒▒▒▓░▒░▒░▒░░▒▒▒░░▒▓▓▓▒▒ \n" + " ░░▒▒▒▒░▒▓▓▒░▒▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒░▒▒░▒▒ ░░▒▒░░▒▒░ ▒▓▓▓▓▒ \n" + " ░░ ░░░▒░ ▒▓░▒▓▓▓██▓█▓█▓▓▓▒▒▓▓▓▓▓▒▒▒░▒▒▒░▒▒▒▒▒▒▒▒▒░░░░░░ ░▒▒░░▒░░░░░▒░░▒▒▒▒▒▒▒▓▒░ \n" + " ▒▓▓▒░▒▒░ ░█▒░▓█▒▓██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒░░▒▒▒▒░░░░░░░ ░░▒░▒▒░░░ ░░░░░▒▒▒▒ \n" + " ░▒▒░░░░ ██░ ░░ ▒░ ▒░░▒▒▒▒░▒▒▒▒░▒▒▒░▒░▒▒░░░░░▒▒░▒▒▒░░░░░░▒░░░ ░ ░░░░░░▒▒ \n" + " ░▒░ ▒▓░██▓░▒ ░░ ░░░▒░░░░░░░░ ░░░░░░░░░░▒▒▒▒░ ░░ ░░░ ░ ▒░░░▒▒ \n" + " ▓▓ ▒▓▒▒░░░░ ░░░░▒░▒░▒▒▒▒░ ░░ ░▒▒░▒░ ░░░░░ \n" + " ▓▒ ░░░░░░░░▒░▒░░ ▒▒▒░ ░░▒░ \n" 
+ " ░░░░░░▒▒░ ▒░░ ░▒░ \n" + " ░ ░░░░▒▒░ ░▒░ ░▒░ \n" + " ░▒░░ ░░▒▒▒ ░░░ ░▒░ \n" + " ░░░ ░░░▒░ ░▒ ░▒░ \n" + " ░░ ░▒▒░░ ░░ ░░ \n" + " ░░ ░▒░ ░░ ░░ \n" + " ░░ ░▒░ ░░ \n" + " ░░░ ░░ \n" + " ░░░ ░ \n" + " ░░ \n" + " ░░░ " + "Ao longe ouve o som da vegetação sendo pisada ou mexida observando calmamente voce avista um imponente javali\n" + "os pelos escuros dão um tom sombrio a fera, contrastando com suas presas brancas como o leite, suas patas e focinho estão cobertas de lama.\n" + "com seu equipamento em maos, e seu instinto de caça agucado, prepara-se para o ataque no desavisado animal.\n" + "aproximando-se sorrateiramente por trás da criatura, segurando firmemente sua arma e tentando fazer o minimo de barulho possivel."); System.out.println("1-ATACAR FEROSMENTE 2-ATACAR SILECIOSAMENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.javali(); System.out.println("A criatura percebe seu movimento e começa a fugir, com muita dificuldade voce consegue alcansa-la"); break; } if (resp == 2) { Som.javali(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR FEROSMENTE 2-ATACAR SILECIOSAMENTE"); resp = e.nextInt(); } Som.cervo(); System.out.println("com um rápido e súbito movimento você lanca-se sobre a fera que num grunhido de dor e aflição cai por terra\n" + "ao lado, voce solta sua bolsa e empunha sua pequena adaga para dilacerar a fera e coletar sua carne e seu couro"); System.out.println("1-ESFOLAR O ANIMAL"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-ESFOLAR O ANIMAL"); resp = e.nextInt(); } System.out.println("você começa a descarnar o animal, coletando o conteudo da caçada."); Som.esfolar(); AnaoIA javaliLeste = new AnaoIA("Javali", new Guerreiro()); Inventario loot = javaliLeste(javaliLeste.getClasseInimigo()); abrirInventario(loot, "Javali"); return 1; } public long irNorte() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("\n" + " \n" + " ░░ \n" + " ▒▓ \n" + " ▒▓▓ \n" + " ░▓▓░ ░ \n" + " ░░▓▓ ▒ \n" + " ▒▓██▓▓ \n" + " ░▓▓▓▓▓▒▒▒░ \n" + " ░▒▓▓▒██▓▓▒ \n" + " ░░░▒▓▓░░▒░░ \n" + " ▒▓▓██████▓▓▒░ ░ ▒▓▓█▓▓▒▒▒▒▒░ \n" + " ▒████████████████▓░ ██▓ ░░▒▒░██▒▓▓▓░ \n" + " ▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓▓▒▒▒█▓ ▓██▒ \n" + " ░███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▒███▒▓█▒ ▒▒▓▓▓▒▒▓█░▓▒▒▒░░░░░ \n" + " ░██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░▓█████▓▒▓▓░▒▓██▓▓▓▓▓█▓▒▓▓▓███▓▒░ \n" + " ██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██░░█████████▒▒▓▒██▓▓█▒▒▓▓██▒▒ ░▒░░▒░ \n" + " ▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓░▓████████████▒▒▒▒▒▓▒ ▓█▓░▒▓▒░ \n" + " █▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒████████████████▓▒▒▒▓▒███░▒▒▒▓█▓▓▒▒░░ \n" + " ▒█▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▒▒█████▓█████▒▓██▒▒█▓▒▓▓▓████▓▓█▒████▓▒░░ \n" + " ▒█▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▒▒▒▒▒▒▓██▓░▓▓██▒░▒▓▓██▓▓▒▓▒░████░▓▓▓▒▒▒░ \n" + " ░██▓▓▓▓▓▓▓▓██▒▒▒▒▒▒▒▒▒▒▒▒░▒▓▒▒▒▒▓█▒▒▒▓▒▒▒▓▒▒▒▒█▓██░ \n" + " ██▓▓▓▓▓▓██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒██▒██░ \n" + " ▒█▓▓▓▓██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒██▒▓▓▓▓▓▒▒░ \n" + " ▓█▓▓██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒██░▒███▓▒▓██▓▓▒░ \n" + " ▒██▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓█▓▒░█▓▒▒▒▓██▓░░ \n" + " ▒█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓██▓▓▒▒▒███░▒▒▒▒▓██▓▓▒░▒▒▒ \n" + " ▒█▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▓███▓░▒███▒▒▓▓▓▓▓░ \n" + " ▓█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓█▓▒▒▓█▓▒▓███████▒█▓▒▒▓▓▓▓▓▒░\n" + " ▓█▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▒▓▓▒▓█▒▒░▓▓▒▓▓▒▓▓▒▒ \n" + " ▒██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▒▒▓▒▒▓▓▓▓▓▓▓▓▓▓▓█▒ " + "\nCom muitas rochas entre as grandes arvores e arbustos de variados tamanhos, avancar por aqui se torna por vezes 
bastante dificultoso\n" + "tendo que frequentemente escalar ou desviar das grandes rochas. Mais alguns minutos de caminhada voce se depara com uma pequena falésia\n" + "pelo seu tamanho reduzido ainda é possivel escalá-la, mas ainda assim esta acao pode vir a ser perigosa caso algo de errado.\n" + "O pensamento de contorná-la ou encontrar um ponto melhor para escalada passa por sua mente."); System.out.println("1-ESCALAR 2-CONTORNAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("Utilizando de suas grandes capacidades físicas escalar tal elevação se torna brinquedo de criança mas ainda assim o instinto\n" + "diz a voce para seguir com cuidado, nunca se sabe quando uma pedra solta ou lisa ou sem aderencia suficiente vai ser a proxima a ser\n" + "escolida como alavanca. Ao terminar de subir voce avista a sua frente mais claridade que o normal para o meio de uma floresta."); System.out.println("1-AVANÇAR 2-VOLTAR E CONTORNAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("" + " \n" + " ▒ ░ ▒░ ░██ \n" + " ░█░ █░ ░ ░█▓ ██ \n" + " ▓█ ▒█ ▒ ░█ ▓█ ██ \n" + " ██▓███▓ ████████▓█▓░ \n" + " ▓█████▓▓▓▓████▓▒▒ ░░░▒ \n" + " ▒▒ █████▓▓███▓ \n" + " ░██████████▓ \n" + " ░▓██████████▒ \n" + " ▓███████████▓ \n" + " ░▒░ ▓██████ \n" + " █████▓▓░ ░▒▒▒░ ░░▒▒▒▓▓▓▓████▓▒ \n" + " ██▒░ ▓█████████████████████████████░ \n" + " ░▓██░▒████████████████████████████████ \n" + " ████████████████████████████████████▒ \n" + " ████████████▓████████▓▒▒██████████▒█▒ \n" + " ▒███████████████████▓▒▒▓████████▒ ▓█ \n" + " ▓█▓▒██████████████████████████░ ░ \n" + " ▒█ ████▓████████████▒ ▓██████ \n" + " ███▓░▒▒▒▓▓▒▒▒▓█ ░ ░█████▒ \n" + " ███░░ ░██▓▓░████▒ \n" + " ▒▓ ██░ ░███▒ ▒███▒ \n" + " ███░ ░ ██ ▓░ ███▒ ███ \n" + "▒█▓▓▓ ▒▒ █▒ ▓░ ▓█▒ ░█▓ \n" + "▒▒ ▓█░▓ ▓█ █ █▓ █▓ \n" + "▒░ ▓███ ░ █▓ █▓ ██ █▓ \n" + "▒▓ ▓▓█▓ ▓░ ░ ▒░░ ██ ██ ▒█░ ██ \n" + "▒█▒▓░███▒ ▓░▓▓▒ ██ ██ ░██ ▒ ░██ ░\n" + " ▓█▓░███▒▒░ ░▒▒▓ ▒█▒ █▓ ░ ▒█▒ ░ ███░▒██ ░\n" + " ░▓ ▒░ ░▒▒ ░░ ░ ▒▒░ ░ " + "\nno centro da clareira agora avistada totalmente esta um majestoso cervo, com seus grandes chifres e seu andar despreocupado.\n" + "voce ja consegue sentir o sabor de sua suculenta carne em sua boca. E com este pensamento prepara-se para o ataque."); System.out.println("1-ATACAR"); Som.cervo(); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { System.out.println("O cervo mesmo ferido ainda tenta correr para o abrigo da floresta, mas antes de atingir seu objetivo tomba, levantando leivas de grama com sua pesada galhada."); } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR"); resp = e.nextInt(); } System.out.println("Você começa a descarnar o animal, coletando o conteudo da caçada."); Som.esfolar(); AnaoIA cervoNorte = new AnaoIA("Cervo", new Guerreiro()); Inventario loot = cervoNorte(cervoNorte.getClasseInimigo()); abrirInventario(loot, "Cervo"); System.out.println("Apos a tarefa voce percebe que grande parte do dia já se passou e é sabio retornar antes que a noite caia sobre Lavitan, apesar desta parte ser apenas sua borda ainda assim, varios perigos podem surgir."); return 2; } if (resp == 2) { return 2; } System.out.println("Numero invalido, tente novamente\n" + "1-AVANÇAR 2-VOLTAR E CONTORNAR"); resp = e.nextInt(); } } if (resp == 2) { System.out.println("Voce segue esgueirando-se da vegetação e rochas, contornando a pequena falésia. 
Ela parece nunca terminar\n" + "mas voce segue destemidamente seu caminho em busca de algo para caçar.\n" + "Após um bom tempo de caminhada você ja se sente cansado, pois o esforço de andar nesta parte da floresta nomeDaFloresta é grande.\n" + "Passa pela sua mente o pensamento de voltar para casa e ir na vila nomeDaVila comprar alimentos\n" + "embora não seja a opção mais barata neste momento parece bastante convidativa."); System.out.println("1-VOLTAR PARA CASA 2-SEGUIR EM FRENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("" + "▓████▓▓▓██▓▓▓▓▓████▓██████████████▓█▓█▓▓▓▓█▓▓▓▓▓▓▓██▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓██▓▓▓▓▓▓█\n" + "██▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓█▓▓█▓▓████████████████████████▓███████▓▓▓▓▓▓▓█████████▓█████████████▓█▓███████████████▓██████\n" + "██ ░▒░▒▒░▓▓▓ ▒░░░░▒█▒▒ ▓░▒▓▒▒▒ ░░░▓██▓▓▓▓▓█████▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▓\n" + "▓▓░▒▒▒▒▒▒▓▓▓▒▓▒▓▒▒▒█▒▒░▒░▓▓▓▓░▒▒▒▒█████▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░░░▒░░▒░ ░░░▒▒▒▒▒▓██▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▓█\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓█▓▒██████▓███████▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░░░▒▒▒░▒▒▒▒▒▒▒░░░▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓▓██████▓▓▓▓▓▓██▒ ░▒▒▒█ ▒▒░░▓██▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓███▓▓▓▓▓▓█▓▒░▒▒ ░░▒▒▓▒▒▒▒▒▒░ ░▒▓▒░▒░░░░▒▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓███▓█▓▓████▓▓▓█▓▓▒▓▓▒▓▓▓▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓▒▒▒▒▓▓▓▓████▓░▒░▒▓▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒░ ░▒░░▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█\n" + "▓▓▓▓██▓▓▓▓█████▓▓▓▓▓█████████▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░▒░░▒▒░░░░▒▒▒▒▒ ▒▒▒░▒▒▒▒▒▒▒▒▒░░░░▒▒░░▒▒░░░░░▒▒▒▒▒░▒▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓█\n" + "▓█▓▓▓█▓▓██▓███▓▓▓▓▓▓▓▓▓▓▓█████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░ ░░ ░▒░▒░░░░░░ ░░░░░ ░▒▓▒▒░░▒▒▒░▒▒▒▓▓▓▒░▒▒░▒▒░░▒▒▒▒▒▒▓▓█▓▓▓▓▓▓▓▓▓▓▓█\n" + "███████████▓▓▓▓▓▓███▓▓▓▓▓▓▓▓▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒░▒▒░░▒░░▒░░░░▒░░░░░░▒░░░▒▒▒▒▒▒░░░▒▒▒▒▒▒▒░░░░░▒▒▒▒▒▒▒░░░▒▒▓▓▓▓▓▓▓▓▓▓▓▓\n" + "██▓█▓█████████▓█████▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▒ ▒▒▒▒▓▒░▒▒░▒▒▒▒▒▒░░░░▒▒▒▒▒▒░ ░▒▓▒ ░▓▒▒▒▒▒▒░ ░▒▒▒▒░░░░▒░░░░░░▒██▓▓▓▓▓▓▓▓▓\n" + "███▓▓▓███▓██▓▓█▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▒░░▒▒▓▒▒░░▒▒▒▒▒░░░ ░ ░░░░ ░▒▒▒▒▒▒▒▒░▒▒░░▒▒▓▒▒░░░░ ░░░░░ ░░░░▒▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓█▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░░░▒▒▓▒░▒▒░▒▒▒▒▒▒░░ ░ ░░▒▒▒▒▒░▒░░▒▒▒▒▓▓▓▒▒▒▒░░░░ ░▒▒▒▓▓▒▒░ ░▒▒▒▒▓▓▓▓█\n" + "▓█▓▓█▓█▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒░░▒░░░░▒▒▒▒▒▒▒▒▓▓▓▒░▒░░░░░░░░░░░ ░░▒▒░ ▒▒▓▓▓▒░░░░░░▒▒░░░▒▒▒▒░▒▒▒░░░░▒▒▒▒░▒▓▓▓\n" + "▓█▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒░▒ ░░ ░░ ░░░░░░▓▒░░░ ░▒▒░░▒▒▒▒▒░░ ░ ░ ░░░░ ░▒▒░░▒▒░▒▓▒▒▒▒▒▓\n" + "██▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓░░▒░▒░▒░▒▒░ ░░░░░░ ░░░ ░░░░░ ░▒░▒▒▒░░░░▒▒▒▒▒▒░ ░▒▒░ ░░░░░░░▒░░░░▓▓▒▒▒▒▒\n" + "██▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░░▒▒ ░▒▒▒░░▒▒░░▒░░░▒░░░░░ ░░▒▒░░░░░░░▒░▒▒▒▒░░▒░░ ░▒▒▒▓▒▒▒▒ ░░░░░░░░▒▒░▒░░▒▒▒▓▒▓\n" + "▓██▓▓▓▓▓▓██▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒▒░▒░░░░ ▒▒▒▒▒▒▒░▒▒▒░░░▒▒▒░░ ▒▒▒▒░░░░░ ▒░░░░░░░░▒▒░ ░░▒▒▒▒▒▒▓▒▒░ ░░░▒▒▒▒▒░▒▒▒▒▒▒▒▓\n" + "██▓███▓▓▓▓▓▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░▒░░░░ ▒░░▒▒░▒▒▒▒▓▒░░░░▒▒░░ ░▒░░░ ░ ░▒▒░░░▒▒▒▒▒░░░░▒░░▒▒▒▒░░░░░ ░░░ ░▓▓▒▒▒▒▒▒▒▒▒▓\n" + "▓█▓▓████▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒░░░░▒▒▒░░▒▒░▒▒ ░▒▒▓▒ ░░░▒░░░░ ░▒▒▒▒░░ ░▒▒▒▒▒▒▓▓▒░░░░ ░ ░▒▒░ ░░░░ ░░ ░▒▒▒▒▒▒▓▒▒▓\n" + "▓█▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒░▒▒▒▒▒▒▒▒▒░▒▓▒ ░░▒░ ░░░░▒▒ ░░░░░░▒░░░▒░░░░░░░░░░░▒▒▒▒▒▒░░▒▒▓▓░▒▒▒▒░ ░░▒▓\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░▒▓░░░░░░░░░▒░░▒▒░░░░▒▒░░░░░░ ░░░░░ ░░▒▒░░ ░░▒▒░░▒▒▒▒▒░░▒▒▒▒▒▓▒░▒▒▒▒▒▒░░░░ \n" + "▓█▓▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▒░░▒░▒▒▒▒░ ░▒▒░░▒▒░▒▒░░▒░░░░░░ ▒▒▒▒░░ ░▒░░ ░░▒░▒▒░ ░▒▒░░░▒▒▒▒▒▓▓░░ ▒▒▒▒▒▒▓▓▒▒▒\n" + "███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ░░░▒▒▒░░ ░▒▒░▒▒░░░▒░░░░░░▒░░▒░ ░▒▒▒░░ ▒░░░░░▒░▒▒▒▒░░░░ 
░░░▒░▒▒▒░▒▒▒▒▒░▒▒▒░▒▒▒░░▒▒▒░░\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▒ ░▒▒▒░░ ░ ░▒▒▒░ ░░░░░░░░░░░░ ░ ▒░ ░▒▒▒░░░▒▒▒░░ ░▒░▒░░▒▒▒▒▒▒▒▓▒ ░▒▒▒░▒░ ░░ ░░▒\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░░▒░░░░░░░░░░░░░ ░▒▒▒░░░░░░░░░░░ ░ ░░░▒▒░ ░ ░░░ ░░░ ░░░░▒░▒░░░░▒▒▒▒░▒▒▒▒▒ ░ ░▒▒▒▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ░▒░░░░░░░░░░ ░ ░░░░░░▒░▒░░░ ░░░ ░ ░░░ ░ ░▒▒▒▒░░░░░░░░░░ ░▒▒▒▒░░░▒░░▒▒░░▒▒▒▒▒░░▒░░▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒░▒░░░░░▒▒▓▒▒▒░░░░░░░ ░▒░▒▒░░▒▒░░░░░░ ░░░░ ░ ░ ░▒░░░░░░░▒░░░░░░░▒▒▒░░░░▒▓▒▒▒▒░▒▒▒░▒▒░░░ ░░░░\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓░▒▒▒▒▒▒░░░░▒▒░▒▒▒░░░░░░░░░▒░▒▒▒░░ ░░░ ░░ ░░░░░ ░░░ ░ ░░░░░ ░░ ▒▒░░▒░░▒▒▒▒░░ ░░░░▒░░░░░░░▒\n" + "▓█▓▓▓▓▓▓▓▓▓▓▓██▓██████▒░▒▒▒░▒░░ ░▒▒░▒▒▒▒▒░░ ░ ░░▒▒▒░▒▒░ ░▒░ ░ ░▒░ ░░░ ░░░▒░░░░░░ ▒▒░▒▒░ ░▒▒░░ ░▒▒▒▒▒░░▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓▓██▓▒▒▒▒░░░░░░ ░░░░░░░░░░░▒▒▒▒▒░░ ▒▒▒░▒▒▒▒▒░░░░ ░ ░ ░░░░ ▒▒▒▒▒▒▒▒▒▓▒░░░▒▒▒▒▓▒▒▒▒▒▒▒▒\n" + "▓▓▓▓▓▓▓▓▓▓█▓░ ▒░ ░ ▒▓▒░▒░░░▒▒░ ▒▒▒▒▒░░░▒░▓▒░░░▒░░░░░░ ░ ░ ░░░ ░ ░░░░░ ░░░░░ ░▒▒░░░░▒▒▓▒▒░░░░▒░░░▒▒▒▒▒░\n" + "▓█▓▓▓▓▓▓▓▓▓ ░ ░░░░░ ▒▒░░ ░░░░░░░░▒▒▒▒▒▒▒░░▒▒▒▒░░▒▒░░░░▒▒▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒░ ░ ░ ░░▒▒▒▒▒▒▓▓▒▒▒░░▒▓▓▒▒▒▒▓▒▒\n" + "▓█▓▓▓▓▓▓▓█▒ ░░░▒▒▒▒░▒▒░░▒░ ░░▒░▒▒▒▒▒░▒▒▒▒░▒░░▒▒▒░ ▒▒▒▒▒▓▓▓▓▓▓▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒░░░░ ░░ ░░░▒▒▒▒░▒▒▒▒▒░▒▒▒░░ ░░░░\n" + "█░ █▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░ ░░░▒▒▒▒░░░▒▒▒▒▒▒░ ▒▒▒░░ ░▒▒ ░░▒▒▒░▒▒▒▒▓▓▒▒▓▒▒▒▒▒▒░░░ ░░▒▒░░▒▒▒ ░ ░▒▒░ ░ ░░▒▒▒▒▒░░ \n" + "█░ ███▓▒░░▓▓▒░░▒▒▒▒░░░░░░ ░ ░░░░▒▒▒░░░░░▒ ░▒▒░▒▒░░ ▒▒░▒▒▒▒▒▒░░ ░░░▒▒▒▒▓▓▓▒░░░░ ▒▒▒▓▒▒▒▒▒▒░░░░ ▒░▒░░ ░░░░ ░░░▒░░░░░\n" + "█▓▒▓░░▒▒▒▒▒░░░░▒▒░ ░ ░ ░ ░░░ ░░░░▒░░▒▒▒▒░░░░░░░░░░ ░▒▒░░░░▒▒▒░░▒▒▒▒░░ ░░░░░░ ░▓▓▓▒░▒▒▒▒▒▒░░░░░▒░░░░░░░░░░░░░░░▒▒▒░▒\n" + "▓▓▒░ ▒▒▒▒▒░░▒▒▒░ ░ ░░░░░░░▒░▒░░▒▒░ ░░░░░ ░▒░░░░░▒▒ ░░░░░░░░▒░ ░░░░ ░▒▒▒▓▒░▒▒▒▒▒▒▒▒▒░░▒░░░░░░░░░░░ ░░ ░░░░\n" + "▒▓▓▒▒▓▒▒░░░▒░░░ ░░░ ░░░░░░░░░ ░░ ▒▒▒░░▒▓░░░░░░░░░░▒▒ ░ ░░░░░ ░░▒▒▒▒▒░▒▒░ ░░ ▓▒░░▒▒░▒▓▒▒▒▒▒▒▒▒░ ░ ░ ░\n" + "▒▒░▒▓▒▒▒░░▒░░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒░ ▒▒ ░▒▒▒▒▒░░░░ ░░░ ░▒▒▒▒▓▓▒▓▓▒▒▒▓▒▒▒▒▒▒▒▒░░░░░░ ▒▒▒░▒▒░▒▒▒▒▒▒▒▓▒▒▒▒░ ░░░░░ ░ ░░░░▒▒▒ " + "\nVocê segue bravamente seu caminho, sempre atento para os sons da floresta, nunca se sabe quando uma presa pode surgir.\n" + "Mas apesar dos seus esforços você se encontra em um “beco” sem saída formado pelas rochas\n" + "nesta parte da falésia não é possivel escalar devido a inclinacao vertical da formação.\n" + "Farto da situação e sem encontrar nenhum alvo para sua caçada voce decide voltar para casa e ir à vila comprar algo"); return 2; } if (resp == 1) { return 2; } System.out.println("Numero invalido, tente novamente\n" + "1-VOLTAR PARA CASA 2-SEGUIR EM FRENTE"); resp = e.nextInt(); } } System.out.println("Numero invalido, tente novamente\n" + "1-ESCALAR 2-CONTORNAR"); resp = e.nextInt(); } return 2; } public long fase2() throws Exception { Scanner e = new Scanner(System.in); int resp; System.out.println("ATO 2"); System.out.println(""); System.out.println("" + " \n" + " ▒▒░ \n" + " ░▒▒▒ \n" + " ░▒▓▒ ▒▒▒░░░ \n" + " ░▒▓▒ ░▒▒▒▓▓▓▒▓▒▓▓▓░\n" + " ░▒▒▒ ░▒▒▒░▓▒▒▒ ▒▒▓▓ \n" + " ░░▒▒▒░░ ░▒▓▒ ░▒▒▒▒ ░░ ░▒▒▓▓▒ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░ ▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒░ \n" + " ▒▒░░░ ░░░▒▒▒▒▒▒▒░░░ ▒░▒▒▒░░░░░░▒▒▒▒░ \n" + " ░▒░ ░▒░▒▒░▒▒░░░░░▒▒▒ ░▒▒▒▒▒▒▒▒░░░░ \n" + " ░░░▒▒▒░░░░░░░▒▒▒▒▒▒░░▒▒▒ ▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░░▒▒▒▒▒▒▒▒▒▒ ░▒▒░ ▒░░░ ▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▒▒▒▒▒░░▒▒▒▒▒░░░▒▒▒▒▒▒▒▒ ░░░▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒░ ░ ░ ▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒ \n" + " ▒▒▒▒░▒░ ▒▒▒▒▒░░ ▒▒▓░ ░░▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▒▒▒▒▒▒▒▒░ ░▒▒▓░ ░░░ \n" + " ░░ ░▒▒▒▒ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ░▒▒▓ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▒ \n" + " ░▒▒▓ \n" + " ░▒▒▓░ \n" + " ░▒▒▓░ \n" + " ▒▒▒▒▒ \n" + " ▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ░▒▒▒▓▒ \n" + " ░▒▒▒▒▒ \n" + " ▒▒▒▓░ \n" + " ░░▒▓░ 
\n" + " ▒ ▒▒▓ \n" + " ░█▒█▒ ░▒▒ \n" + " ▓▓██▓ ▒▒▒ ▒▒▒░ \n" + " ▒▓▓▓█▒ ▒░░▓▓█▓▒ \n" + " ░▒▒▓▓▓ ▒▒████████ \n" + " ▓▓▓▓██ ░▒▒▓███████ \n" + " ▒█▓▓▒ ▒▓▒ " + "\nVoce retorna enfrentando as adversidades do caminho já passado anteriormente, agora com mais pressa e tranquilidade\n" + "por fim depara-se com a bifurcação norte/leste e ve o caminho ao sul de onde veio.\n" + "1-VOLTAR PARA A CIDADE 2-IR PARA O LESTE 3-IR PARA O NORTE"); resp = e.nextInt(); while (resp != 1 || resp != 2 || resp != 3) { if (resp == 1) { break; } if (resp == 2) { irLeste(); break; } if (resp == 3) { irNorte(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-VOLTAR PARA A CIDADE 2-IR PARA O LESTE 3-IR PARA O NORTE"); resp = e.nextInt(); } System.out.println("Seguindo seu caminho de retorno para sua casa voce tem o pressentimento de estar sendo observado, nao interrompe sua caminhada mas sua atenção eleva-se\n" + "para qualquer ruido ou movimentacao nas proximidades. Com um subito salto um Goblin surge na sua frente, com uma clava em mãos segue velozmente em sua direcao.\n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░░░ \n" + " ▓▓░ ▓███████▓ \n" + " ██▓ ░██▓▓▓▓▓░▒██▒ \n" + " ░███ ████▓▓▓▒░░▒▓█▒ \n" + " ░░░ █▓▓███▒░▒▒▒▓▒▒▒▓▓█▓ \n" + " ██▓▓▓▒ ▓▓▒▓▓▒▓▓ ░▓▓▓▓▒▒▒░▓█▒ \n" + " ▒▒▓▓▒▒▒░ ▓█▓▓███▓▒▒░▓█▓ ░ ▓▓▒ \n" + " ▒░▒▒░░ ▓░▒▓▒▓███▓▓██▓░▒▓▓▓▒░ \n" + " ▒░ ░ ▓ ▓ ▒▒░▓▒▓▒░▒▓▓▓▓ \n" + " ▒▒░ ░▒▓▓▒▒▒▓▓▒ ▓▓▒░░░░▒▒░ \n" + " ▒▓▓▒░ ░░ ▓█████▓▓▓▓▓██▓▒░▒▒░░ \n" + " ░▓▓▒ ░██████▓▒▓▒▒▓████▓▓▓▒░▒▒▓░ ▒▓█████▒ \n" + " ▓█▓▓░ ▒█▓▓▓███▓▓▓▒▒▓▒▓▓▒▒░░░░░░▒▓▓▒░ ▓█▓███▓░▒▒ \n" + " ▒█▓█▓░ ░▒▓▒▓▓▓████▓▓▓▒▒░ ░░░░▒▒▒▓▓▓▓▒▓█▓▓ ▓▒▓▒▓█▓ \n" + " ▓██▒▓█▒░░▒▒░ ▓▓▓███▒▒▒▒▓▓▒░▓███▓▒▒▒▒░░▒▒▒▓▓░▓ ░ ░░░ \n" + " ▓▓▓█▓░▒░▒░░ ▓▓▓▓▓▓▓▒▒▓▒▒▒▓██▓▒▒▒▒░░░▒▒▒░▒▒░▒▒▒ \n" + " ░▒░ ░▒▒░░ ▓▓▓▒▒▒▒▒▒░░░░▓█▓▒ ▒░ ▒░░▒ \n" + " ░▒░ ▒█▓▓█████▓▓▒▒░▒▒▒░░░ \n" + " ░▓▒ ░ ▓▓▓▓██████████▒▒░░░▒ \n" + " ▒▓▓▒ ░█▓▓▓▓▓███▓████▓▒░░░▒ \n" + " ░░ ▓▓▓▓▓▓▓▓▓▓▓▒▒▒░▒▒░▒░ \n" + " ▒▒▒▒▓▓▓▒▒▒▒░░░░░░░▒ \n" + " ░▒░░░░░ ░ ░▒ \n" + " ░░▒▒▒░░░░░░░░░░░░▒▓▒ \n" + " ▒░▒▒░░░░░░░░░░░▒▒▒▒▓▒ \n" + " ▒░▒▒▒░░░░░░░░░▒▒▒▒▒▓▓▒ \n" + " ▓▒▒▒░░░░ ░▒▒▒▒▒▒▒ \n" + " ▒▒▒░░ ▒▒░░ \n" + " ░░░ ░░░ \n" + " ▓▒░░░ ▒▒▓▓ \n" + " ▒▒▓▒░▒░ ░▒▒░▒▒ \n" + " ▒▒▒▒▒▒▒░░░░░░ ▒▒▒░▒▒ \n" + " ░░░░ ░░ ░ \n" + "Apesar dos poucos segundos do surgimento da criatura voce observa sua grotesca feição, a boca aberta exibia seus amarelados dentes\n" + "também vestindo alguns farrapos e um velho colete de couro. Sem titubear voce esta pronto para a batalha\n" + "com um rapido movimento já está com sua arma em maos e a adrenalina elevada.\n" + "1-ATACAR O GLOBIN 2-FUGIR"); Som.globin(); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.lutasoco(); System.out.println("Com a criatura estirada ao chao na sua frente voce ainda com o sangue quente da pequena luta, espera que mais deles aparecam para lhe enfrentarem\n" + "pois estas criaturas sempre atacam em bando. Apesar do pequeno momento de espera mais inimigos não aparecem"); AnaoIA globinFase2 = new AnaoIA("Globlin", new Guerreiro()); Inventario loot = globinFase2(globinFase2.getClasseInimigo()); abrirInventario(loot, "Goblin"); break; } if (resp == 2) { Som.corre(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ATACAR O GLOBIN 2-FUGIR"); resp = e.nextInt(); } System.out.println("Voce acha estranho, mas agradece mentalmente que isso não aconteceu, um inimigo, apesar do susto, não era um grande oponente\n" + "mas uma grande quantidade com certeza seria perigoso. 
Sem mais nada a fazer resta apenas retornar para a sua morada ao sul.\n" + "1-IR PARA O SUL"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PARA O SUL"); resp = e.nextInt(); } System.out.println("" + "██████████████████████████████████████████████▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▒▒▒\n" + "████████████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▓▒▒▒▒▒▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▒▒▓▒▒▓\n" + "████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▒▒▓▒▓▒▓\n" + "███████████████████████████████████████████▓█████▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▒▓▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▒▓▓▓▒▒▓\n" + "██████████████████████████████████████████▓█████▓▓▓▓▓▓▒▓▒▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▒▓▓▓▓▓▓▓▓▓▒▓▒▓▒▓▓▓▒▓▒▓\n" + "████████████████████████████████████████████████▓▓▓▓▓▒▓▓▓▓▓▓▓▓▒▒▒▒▓▓▓▓▓▒▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▒▒▒▓▒▓▒▓▓▓▒▓▒▒▒▒▒▓▓▓▒▓▒▒▒\n" + "████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓▓▒▓▒▓▒▓▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▓▒▓▒▒▒▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▒▒▒\n" + "██████████████████████████████████████████████▓█▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▓▓▒▓▒▒▒▒▒▓▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▓\n" + "███████████████████████████████████████████████▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▒▓▓▓▒▓▓▓▒▒▒▒▒▒▒▓▒▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓\n" + "███████████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▓▒▓▒▒▓▓▒▓▒▓▒▒▒▒▒▒▒▓▒▒▒▓▓▓▓▓\n" + "███████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓\n" + "██████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▒▓▓▓▒▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▓▒▓▓▓▓▓▓\n" + "████████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▒▓▓▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓\n" + "█████████████████████████████████████████████▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓███▓▓▓▓█\n" + "███████████████████████████████████████████████▓▒▒▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "███████████████████████████████████████████████▓▓▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓███████████████████████████████████████████▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒░▒▒▒▓▓▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "███████████████████████████████████████████▓▓▒▒▒▒▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒░▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓███▓████████████████████████████████████▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓████████████████████████████████████████████▓▓▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒░▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓███████████████████████████████████████████▓▒▒▒░▒▒▓▓▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▓▒▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + 
"███████████████████████████████████████▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▓▒▒▒▒▒▓▓▓▓▓▓▒▒░▒▓▓▓▓▒▒▒▒▒▒▒▒░▒▒▒░░▒▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▓▓▒▒░▒▓▓▓▓▒▒▒░▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒░▒░░░▒▓▓▒▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓███████████████████████████████████████▓▒▒░▒▒▒▓▒▒▒▒▒▒░▒▒▓▓▓▓▒▒▒▒▒░▒▒░░░░▒▓▓▓▓▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "████████████████████████████████████████▒▒░░▒▒▒▒▒░▒▒▒░░▒▓▓▓▓▓▓▒▒▒▒░░░░░▒▒▒▓▓█▓▓▓▓▓███▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓████████████████████████████████████▓▓▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▒░░▒░░░░▒▒░░▓█▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "██████████████████████████████████████▓ ░▒▒▒▓▓▓▒▒▒▒░▒░▒░░░░▒▒▓▒░░░░▒▒▒░░░░░▓█▓█▓█▓█▓█████████▓█▓█▓▓▓█▓▓▓█▓█▓█▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "▓█▓█▓▓██▓█████████████████████████████▓▒ ▒▒▓▒▒▒▒░░░░░░░░░░▒▒▒░░░░░░▒▒▒░ ▒████████████▓▓█████████▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "░ ░░ ░░░ ░▒░░░ ░ ░▒░░░░░░▒▒░▒░ ▒▒▓▓▓▓▒░░▒▒░░░░░░░░░▒▒░░░░▒░░░░ ░░░░░░░░░ ░ ░ ░ ░▒░░ ░░░ \n" + "░░░░ ░ ░░░ ░ ░▒░░░ ░ ░░▒▒▒▒▒▒▒░░▒▒▓░░▒▒▒░░ ░░▒░░░ ░░ ░ \n" + "▒▒▒▒▒▒▒▒▒▒▒▒▓▒░ ░▒░ ▒▒▓▒▒ ░░░░░░░░░░░░░ ░░░░░░░░░ ░ ░░ ░ ░ ░ ░ ░░░░ ░░ \n" + " ░▒▒▒░░░▒░░░▒▒▒░▒▓▒▒▒░░▒▒▒▒▒░▒▒▒░░░░▒░░░░░░░░░░░░ ░░░ ░ ░ ░ ░ ░ ░ ░ ░░░ ░ ░░ ░ ░░░░░░░▒\n" + "▒▒▒▒▒▒▒░░▒▒ ▒▒▒░░░▓▓░░░░▒▒▒░░░▒░▒▒░░▒▒▒▒▒▒▒░ ░▒▒▒▒▒▒▓▒ ░░ ░░ ░ ░░░░░ ░ ░ ░░░▒▒▒▓\n" + "▒▓▓▓▒▒▒▒▒▒▓▓▒▒▒░ ░░ ░▒▒▒░ ░░▒░░ ▒▒░▒░ ░ ░░ ░ ░ ░ ░░ ░ ░ ░ ░░░▒▒░░░░\n" + "░░░ ░ ░░ ░ ░ ░ ░ ░ ░░░░░ ░░ ░▒░░░ " + "\nCaminhando para fora da floresta, ainda com sua arma em mãos para prevenir qualquer surpresa e encurtar seu tempo de reacao\n" + "nesta parte da mata já é possivel avistar o sol comm clareza pois a vegetação nao e mais tao densa\n" + "Voce observa que ja passou do meio dia e a fome é grande, fazendo apressar seu passo. 
Ao sair da mata\n" + "voce avista fumaça no ceu, nao é algo totalmente estranho, mas a quantidade meio que lhe deixa preocupado.\n" + "Entao voce sente vontade de saber do que se trata, procurando um ponto de observacao melhor.\n" + "1-OBSERVAR"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-OBSERVAR"); resp = e.nextInt(); } System.out.println("" + " \n" + " ▒ \n" + " ░▓▓█▓▓█▓▓███▒ \n" + " ░ ▒▓▓░ ▓█▓██▓████████▓░ \n" + " █▓ ▓██▓██▓█▓▓████▓▓███▓▓░▒ ▒░ ▒ \n" + " ▒██▓▓██▓▓█▓▓░ ▓▓▓█▓█▓██▓▓██▓▓▓▓█▓▓▓▓▒▒▒▓█▓█▓ ░▓▓░▓ \n" + " ▓█▓▓███▓███▓██▓▒░▒█▓▓█▓▓▓▓▓▓█▓█▓▓▒▒▓▒▒▓██████████▒▓░ \n" + " ░▓█▓ ▓▓▓███████████▓▓▒▒▒▒▓▓▒▒▓▓▓▓▓▓▓▓▓▒░▒▒▒▓█▓████████▓▒░ \n" + " ▓▓█████▒░░▓▓▓▓█▓████▓▓▓█▓▒▒▒░░▓▓▓░░░░░░▒▒▒▒░░░▒▒░▒▒▓█▓▓▓▒▒▒▒▓▒ \n" + " ▓████████▓▓▓▓▓█▓▓▓▓█▓▓▓▓▒▒▒░ ░▒▒░░▒░ ░░▒░░░░░░▒▒▒░░ ░ ▒▒░▒▒░▒ ░▒░ \n" + " ▒▓▓█▓▒▓▓▓▓██▒▓▓▓▒▓█▓▒▓▒▒▓▒ ░░░░░ ▒▒▓▓▒▒▒░▒▒▒░▒▒▓██▓▓▓▓▓▓███▓▒▓▓▓░ \n" + " ▒▒▒▒▒▒▓▓▓▓▓▓▒▓▓░▒░▓▓█████████▓█▓▓▒░░▒▒▓▓████████▓██▓▓█▓▓░ \n" + " ▒▒░ ▒░ ▒▒▓▓▓▒░░▒▓░░▒▓██████████▓▓▓▓█▓▓▒▒▒▓▓▓▓████████▓▓▓▓▒▒▒▒░ \n" + " ▓███████▓▓░░▒▒░░▒▒▒░▒▒▒▒▒██▓▓▓▓████▓▓▓▓█▓▒▓▒░▒▒▒▒▓▓█▓▓█▓█▓▓▓▒▓▓▓▓░░ \n" + " ░▓██▓██▓▓██████▓▓▒░▒▒▒░░░░░░▒▒▓▓▓▓▓▓▓██▓▓▓▒░▓▒▒▓▓▒▒▓▓░▒▒▒▒░▒░░▒▒▒▓▓░▓ \n" + " ▒█▓█▓▓▓███▓▓▓▓███▓▒▒▒▒░░ ▒▒▓▒▒░░ ▒ ░▒░ ░░ ░▓███▓▓█░░▒░▒█▓▒░░░▓ \n" + " ░▒▒▓▓▓▒▓▓███▓█████▓▓▒▒░░░░░░░░░░░ ░ ░ ░░▒▓▓▒▒▒▓████▓██████▓▓▓█▒ ▒▒▓ \n" + " ░▓▒▒▒▒▒▒▓▓▒▒▓▓▓▓▓▓▒░░▒▒▒░▒▒▒▓▓▓█▓▓▓▓█▓█▓▓█▓▓▒▒▒▒░░░░▒▒▒▒▓█▓██▓▒███▓███████▒░░░ \n" + " ░█▓▓▒▒░▒▒▒▒▒▒▒▒▒▓▓▒▒░▒▒▒▒▒▒▓█▓████▓▓██▓████▓████▓▒░░░░▒▒▒▒▓▓▓▓█▓▒██▓█▓▓████▓▓▓▓ \n" + " ░▒▒░▒▓▓▒░▒▒▒▒▒░▒▒▒▒▓▒▒▒▓▓▓▓█▓█▓▓███▓▓█████▓▓███▓▓▓░░░░ ░░▒▒▒▓▒▓▓█▓█▓▓█▓▓▓▓▓▓▓▓▓ \n" + " ░ ▒▒ ░░░▓▒▒▒▓▓██▓███▓▓▓█▓▓▓██▓▓▓▒▒▒▓▓▒▓▓█▒▒▒░░ ░░▒▒▒▒▓▒▒▓▓▓▒▓▓▓▓░▒▓▒▒░ \n" + " ▒▒▓▒▓░░░░░░░░▒▒▓▒▒▒▓▓▓▓▒██▓█▓▓▓▒▒▓▒▓▒▒▒▓▓▓▓▓▓█▓▒░░▒▓▒▒░▒▒▒▒▒▓▓▒▒▒▓▓▓▓▓▓▓▒▒▓▓░ \n" + " ░▒▓▓████▓▒▓▓▓▒░░░ ▒▒▒▒▒▒▓▓░▓▒▓▒▒▒▓▒▓▒░▒▒▒▓▓█▓▓▓█▓▒▒▒▓███▓▓░ ░░░▒▓▒▒▒░▓▓▒░▒▓▓ \n" + " ▒▓█▓▓▓▓▓▓▓▒▓▓▓▓▒▓▒▒░░▒▒▒▒▒▒▒▒░▒▒▒▒▓▓▒▒░░░░▒▒▒▓▒▒░▒░░▒▓███▒▓▓▒▒▒░░░░░░░▒░░▒▒░▒▒▒▒▒ \n" + " ▒█▓▒▓▓▒▒▒▓▓▓▓▒▓▒░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒░░░░▒▓▒▓▒░░░░▒▒▒░▒▒▒▓███▓▓░░░░▓▒▒░░░▒▓▓░▒ \n" + " ░░░░▒▒▒▒░▒▒▒░░░░░░░░▒▒▒▒ ░░░▒░▒░ ░░░▒▒ ▒▒░▒▒▓▒ ░▒▒░░ ▒▒░▒▓▓███▓▓▒▓▓▒▒▒ ░ ▓▒ \n" + " ░▓▒▒▓░▒▒▒▒▒▒▒░░░░░▒▒░▒▓▒░░▒░░░░░░░ ░▒░░ ░░▒ ░░▒▒░▒▒▓▒▒▓▓█▓▓▒░▓▓▒ \n" + " ░ ▒▓▒▒▒▒░▒▓▒▒▒▒▒▓▓▒▒▒▒▓▒▒▒▒▒░░░░░░░░ ░░▒░░░▒▓▓▓▓▒▒▒▒▒▒ ▒▓▒▓░ ░ \n" + " ▒▒▒▓▓░▓█▒ ░░▒ ░▒▒▒░░▒▒░ ░░░░ ▒█▓▓░▒▓▒ \n" + " ░ ░▒░░░▒░░░░░ ░░ \n" + " ░▒░▒░░░░░░ \n" + " ▒▒▒░░▒░▒ \n" + " ▒▒▒░▒▒░▒░ \n" + " ▒▒▒▒▒░░▒▒ \n" + " ▓▒▒▒▒░▒▒▒ \n" + " ▒▒▒▒▒▒░░▒▒ \n" + " ░▓▒▒▒▒▒░▒▒▒▒░ \n" + " ░▒▒▒▓▓▓░▒▒▓▒░░▒▒▒▓▒▒░ \n" + " ░░░▒▒▒░░░▒▒▒░ ░░░░▒▒▒ \n" + " " + "\nSubindo em uma arvore já nas proximidades de sua casa voce avista a vila toda envolta em chamas e fumaca.\n" + "Uma sensação de desespero enche seu corpo, a sensação de perder entes queridos novamente o deixa perplexo\n" + "o unico pensamneto agora é correr para a vila.\n" + "1-CORRER PARA A VILA!"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-CORRER PARA A VILA!"); resp = e.nextInt(); } System.out.println("" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " ░ \n" + " ░▒▒▒▒▒▒▒▓▓▓▓▓▓▒▒▒▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒░ \n" + " ░░▒▓▓▓▓▓▓▓▓▓▒▓▒▓▓▓▓▓▒▓▓▓▓▓▓▒▓▓▓▓▓▓▓█▓▓▒▒▒▒▒ \n" + " ░▓▓▓▓▓▓▓▓▓▓▒▒ ░ ░░▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒░░░▒▒▒▓▓░ \n" + " ▒▒▒▒▒▒░▒▒▒▓▓▓▓▓▓▒▒▒▒▒▒░░░▒▒▒░░░▒▒▒░░▒▒▒▒▒▓▓▒▓░ \n" + " ▒▓▒ ▒░ ░░▒▒▓▓▓▓▓▓▓▓▓▒▒░░ ░░▒▒▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░▒▓▓▓░ ░░ ▓▓▒░ ░░▒ ░░▒▒▓▓▓▓▓▓▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▓▒░░ \n" + " ▒▓▓██▓ ░ ░ ░▒▒░ ░▒▒▒░░░ ░░░░ ░▒▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒░▒ ░▒▓▒ \n" + " 
░▒▓░▒██▒ ▒▒▒░▒░▒▓▒░ ░▒░ ░▓▓▒░░░░░░ ▒▓▓▒▒▒▒▒▒▒▒▒▓▒▓▓▓▓▓▓░ \n" + " ░ ░░░▒██▒ ░░▒ ░ ░░░░ ░░▒▓▒▒░░▒░░▒░░ ░▓░ ▓▓▒▒▒▓▓▒▒░▓▓▓▓▓▓▓▓▓ \n" + " ░░░▒ ░█▓█▓▓▒▓▓▓▓▓▒ ░░░▒▒░░░ ░░▒▒▒ ░▓▓▒▓ ░█▓▓░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒ ░▒▒▒▓▓▓▓█▓▒░░░▒ ░ ░░▒░░ ░▓▓▓▓▒▓▒ ▓█▓▓▓░░░ \n" + " ▓▓▓▒▒▒▒▒▒░░ ░ ░░▒▓▓░░░▒ ░▓▓▓▓▓▓▒▒▒▒▒▓▒░ \n" + " ▓▓▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░ ▓█▓▒▒▓▓▒ ▒█▒ \n" + " ▒▓▒▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒▒▒▒▒▒▒░░▒░░ ▒▓▓▓▓▒▓▒░▒ \n" + " ░▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▒░░ ▒▓▓▓▓▓▓▓░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▓▓▒ ▒█▓▓▒▒▓░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒░▒ ▓▒ \n" + " ▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓░ \n" + " ░░░░▒▒▒▓▓▒▒▒▒▒▒▒▓▓▓▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▓▓ \n" + " ░▒▒▒░▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░▒▒▓▓▒▒░ ░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▒▒░░▒▒▒▓▒ \n" + " ░▓▓▒▓▒▒ ▒▒▒▒░▒▓▓▒▒▒▓▓▓▒░ \n" + " ▒▓▓▒▒ ░▒▒▒▓▓▓ \n" + " ░░░ ▒▓▒▓▓▒ \n" + " ░▓▓▓▒░ \n" + " ▒▒▒▒▒ " + "\nAbandonando sua bolsa (cheia ou nao) voce se põe em corrida, ignorando o cansaco e fome das suas acoes anteriores.\n" + "Apesar de voce ser um estrangeiro aquelas pessoas o receberam de bracos abertos, ha alguns anos voce convive com eles\n" + "seja em confraternizações, trabalhos ou caçadas, eles não eram sua familia de sangue mas não importava, voce gosta deles\n" + "e vice versa. A vontade de chegar logo lhe forcava a correr ainda mais rápido.\n" + "1-USAR TODA A SUA ENERGIA PARA CORRE MAIS 2-CORRER NORMALMENTE"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Som.corre(); System.out.println("" + " \n" + " ▒▒▒▒░ \n" + " ░ ▓▒░▒▒▒▒ ▓▒ \n" + " ░▓▒█░▒▒▒▒▒▒▒ ▒▒▒▒▒▒░░░ \n" + " ▒▓▒▒░░▓▒▒ ▓▓▓▒▒▒▒▒▒▒▒▒ \n" + " ░▒▒▒▒░▓▓▓█▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▒▓▓ \n" + " ▓█▓▓▒▓▓▒░█████ ░▒▒▒▒▒▒▒▒ ░▒▒▓█ \n" + " ██▒░ ▓█▒▒░ ▓██░ ░░░▒▒▒▒░ ░▒▒ \n" + " ░▓█▓▒▓▒▓▓░░░░░ ░░▒▒▓▒ ░░▒▒ \n" + " ▓▓▓░░░░▒░░░░░░▒▓▓▓▓▒░▒▒▓ ▓▒▒▒ \n" + " ░▒█▓░ ░ ░░░▒▒▒█████▓▒▓▓░ ░ \n" + " ░▒▒▓▒▒▒█▒ ▒▒▒▒▒▓███▓▒▒▒▓ \n" + " ░░▒░ ▓█░░░▒▒▒░░░ ░░▒ \n" + " ▒█▒░░░░░ ░▒░░▒ \n" + " █░░░▒▒░▒▒▒░░ \n" + " ▒▓░▒▒▒░░▒▓▓▓ \n" + " ░▓▒▓█▓▓▒█▓▒░░ \n" + " ▓ ░▒▒▒▒▒░░▒▒░ \n" + " █▒ ░░▒░░░▒▒░▒ \n" + " ▒█░░▒░░█▒▒▒░▒░ \n" + " ▒█▒▒▒▒█ ▒▒▒▒▒ \n" + " ░▓▒░▒█░▒▒░░▒ \n" + " ▒▒░▓▒▒▒░░▒ \n" + " ░▒░▒░▒░ \n" + " ░▓░░░▒░ \n" + " ▓░▒▒░ \n" + " ░▒▒▓░ \n" + " ░▓░▒░░ \n" + " ░▓ ░░▒░ \n" + " ▓ ░░▒ \n" + " ▓▓████ \n" + " ▓████▓ \n" + " ░███▓▓ \n" + " █▓█▓█ \n" + " █▓███░ \n" + " ▓▓▓██░ \n" + " █▓██ \n" + " ▓█▓██▒ \n" + " ▒█▓███▓ \n" + " ▓▒░ "); System.out.println("\nNOSSA!! Voce é o Usain Bolt dos RPGs!"); break; } if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-USAR TODA A SUA ENERGIA PARA CORRE MAIS 2-CORRER NORMALMENTE"); resp = e.nextInt(); } System.out.println("O caminho para a vila parecia aumentar em vez do oposto, as passadas não condiziam com a vontade de chegar.\n" + "Por fim, voce finalmente para de correr na entrada da vila, Seus olhos nao conseguem acreditar no que veem\n" + "e o cansaco da corrida não vem. 
Quase todas as construcoes viraram uma pilha de cinzas e as que ainda resistiam\n" + "ardiam em chamas, varios corpos sem estavam espalhados pelo local, alguns ardendo em chamas junto das construcoes.\n" + "Uma indescritivel sensação domina seu corpo mas voce nao fala nada, está em um estado catatônico, apenas\n" + "as suas ainda respondem, e elas deviam ir em direção ao centro de Kenko.\n" + "1-IR AO CENTRO DA VILA 2-SENTAR E DESCANSAR"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 2) { System.out.println("Voce, aparentemente desolado desaba no solo, ficando ali algum tempo\n" + "passado algumas horas, resolve ir ao centro de Kenko"); break; } if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR AO CENTRO DA VILA 2-SENTAR E DESCANSAR"); resp = e.nextInt(); } System.out.println("" + " \n" + " \n" + " ▒ \n" + " ░▓▒ \n" + " \n" + " ░ \n" + " ░ \n" + " \n" + " ░░░░░░░ ░░░░ ░░░ \n" + " ░░░░░▒░░░░▒░░▒▒░▒▒░░▒▒▒░▒░░ \n" + " ░░░░░▒░▒░░░░▒▒░░▒▒▒░▒▓▒▒░▒▒▒▒▒▒▓▒▒░ \n" + " ░ ░ ░░░ ░░░▒░░░▒▒▒░▒▒▒░░░░ \n" + " ░ ░▒▒▒ \n" + " ░░░░▒░░░ ░░░░ ░ ░▓▓▓░ \n" + " ▒▓▓▒ ░▓▒▒ ▒▓▓ ░▓▓▒▓▓▓▒ ▓▓▓▓▒▓ ░▒░▒▒▓░ ░▒▒▒▓▓ \n" + " ░▒▓▓▓▓ ▒▒▒▓▓▓▒▒▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒▓▓ \n" + " ░▒▒▓▒▓░ ░▒▒░▒░▒▒▒▒▒▒▒▒▒▓▓▓▓▒▒▒▒▓▓▒▒▓▓░░ ░░▒▒▓▓ \n" + " ░▒▒▓▒▓▒ ░▒▒▓▓█████▓░░▒▒▒░░░▒▒░░░░░░▒▒▒░░▒▒░▒▓███████▓▓▓▓▒▒▒░░ ░░▒▓▓ \n" + " ░░▒▓▒▓▒ ░░▒▒▒▓▓██████▓▒░ ░░░░░░░░░░▒▒▒▓▒ ░░▒▓▓▓████▓▓███████░ ▒▓▓ \n" + " ░░▒▒▒▒░▒▒▓▓▓▓▓▓▓▓██▓▓▓▓▓▓▒░░ ░▒▒▓▓▓ ░▒▒▓▓████▓▓▓█▓▓▓▓▓▓█▒ ▒▓█░ \n" + " ░▒▒▓▓▒░▒░░░░░▒▒▒▒▒▒▓▓▓▓██████████▓▓▓▓▓▓▒▒▒░▒▒▓▓▓▓▓▓███████▓▓▓▓▒▓▓▒▓█▓▓▒▒▒▒ ░▒▒ \n" + " ░░▒▓▒▒░░░░░░░░░░░ ░ ░░░░▒▒▒▒▓▓██████████░▒▒▓▓▒▓████▓▓▓▓▓▓▒▒▒▓▓▓▓▓▓▓█▒░▒█▒ ▒▒▓░ \n" + " ░░ ░░░▒▒░░░░░░░░░░░░░░░░░░ ░░ ░░░▒▒▒ ▒░▒▒▒▒▓▓▓▓▓▒▓██▓▓▓█▓▓▒▒▒░ █████▒ ▓▓█▒ \n" + " ░▒▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░░░░░░░░░░▒▒▒▒▒▓▓▓▒▓▓▓▒▒▒▓██▓▒░▒▓▓▓▓▓▓▒ ▓██░ ▒▓█░ \n" + " ░▒░▒▓▒▒▒░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░▒▒▓▓▓▓▓▓▒░░░▒▒ ▒▓███▓▓▒▒▒▒▓▓▓▓▒▒█▓▒▓█▒ \n" + " ░▒▒▒▓▒▒▒▒▒▒▒▒░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░ ░▒▒▒▓▒▒░▓▓▓▓▓░▒░░▒▓██▓▓▓▓▓▓▒▒▒▒▒▓▓█▓▒▒▒▒▓▓▒ \n" + " ░▒▒░▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒ ░░░░░▒▓▓▓▒▓▓░▒▓▓░▒▓▓██▓░▒▓▓██▓▒░ ▓█▓░░▒▓▓ \n" + "░▒▒▒▓▓▒▒░░▒▓▒▒░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒▒▒▓▓▓▓▓█▒▒▓▓▓▓ ░▒▓▒░░▒▓▓██▓▒▒▒▒▓▓▓▓▓░▒██▓▓▓ \n" + " ▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒░▒░░░░░▒▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▒▓█▓░░▒▓▓▒▓▒░▒▓██▓▓▓▓▓▓▒▒▒▒▓▓▓█▓ \n" + " ▒▒▒▒▓█▓▒▓▓▓▓▒░▒▒▒▒▒▒▒▒▒░▒▒▒░▒░▒░░░▒░▒▒▒▒░▒▒░▒▒▒▒▒▓▓▒▓▓░▒▒▒▓░░▒▓▓▓▓▓▓▒░▒▓██▒░▓▓██▓▒▒░░▓█▓▒ \n" + " ░░░░░ ░▒░ ░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒▒░░░░▒▒▒▒▒█▓▒▓▓▒▓▓██▒ ▒▒▒▒▓▓▓▓▓░░▒▓██▓▒░▒▓▓█▓▓▒ ▒██▒ \n" + " ░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▓██████▒ ▒▒▒▒▒▒▒▓▒▒▒░░▓██▓█▓▓▒▒▒▓▓█▓▓▓██▒ \n" + " ░░░░▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▓▓▓▒ ░▒▒▓▓▓▓▓▓▓█▓▓░▒▓██▓▒▓████▓▒ ▒▒ \n" + " ▒▒▒░▒▒▒▒▒▒░ ░░▒▒▓▓▓▒▒▓████▓░ \n" + " ░▒▒▒░▒▒▓█░ ▒▒▓▒ \n" + " ░░▒▒▒▒░ " + "\nA paisagem anteriormente se extendia a esta parte da vila também, apenas o antigo templo em seu centro ainda resistia\n" + "nao por falta da insistência de seja lá quem que atacou este pacato lugar, suas grossas paredes feitas de solidas\n" + "rochas a muito tempo, desde que se tem registro esta edificação existia em Kenko\n" + "certamente fazia parte de algo antigo.\n" + "Apenas sua porta estava totalmente destruida, certamente os moradores tentaram abrigar-se do ataque neste solido abrigo.\n" + "Voce sente que deve entrar.\n" + "1-ADENTRAR NO TEMPLO"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { Som.templo(); break; } System.out.println("Numero invalido, tente novamente\n" + "1-ADENTAR NO TEMPLO"); resp = e.nextInt(); } System.out.println("A imagem ali dentro com certeza não sairá da sua mente tão cedo, 
varios corpos mutilados e sinais de luta pelo ambiente.\n" + "Em meio ao silencio enlouquecedor voce ouve alguns gemidos. mais que depressa tenta localizar a origem deste som\n" + "uma breve sentelha de esperanca ascende em seu interior.\n" + "2-PROCURAR A ORIGEM DO SOM"); resp = e.nextInt(); while (resp != 2) { if (resp == 2) { break; } System.out.println("Numero invalido, tente novamente\n" + "2-PROCURAR A ORIGEM DO SOM"); resp = e.nextInt(); } Som.ferido(); System.out.println("" + " ▒ \n" + " ▒░ ▒▓ \n" + " ▓▓ █░ \n" + " ▒█ ░█ \n" + " █░ █▓ \n" + " █▓ █░ \n" + " ▓█ ▓█ \n" + " ░█ █▓ \n" + " █▒ ▒▓██░ \n" + " ▒██▓ ▒▓▒ \n" + " ▒▓█▒ ▓▓▓▒▒░ ░▒▒▓▓██▓ ░ ▓▒ \n" + " ▓░ ░ ░ ░░░░░░░░░▓████████████████▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░ ▒█▓ \n" + " ██▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▓█████████████████░░▒▒▒░▒▒▒░▒░▒░▒░▒░▒░░░▓█▓ \n" + " ▒██▓ ░░░░░░░░░░░░▒░▒░░░░▓█▓███████████████▓░░░░░░▒░░░░░░░▒░░░░░░▒▓ \n" + " ░░░▓▓ ░░░░░░░░░░░░░░░░░░▒▒░░░░░░░░░░░░░░░░▒░░░░░░░░░░░░░░░░░░░░▒▓▓░░ \n" + " ░░ ░▓▓░ ░░░░░░░░░░░░░░░░░░░░░░░░ ░ ░ ░░░░░░░░░░░░░░░░▒░░░▒░▒░▒░▒▓▓███▒▒░ \n" + " ░▒░░▒█▓██▒ ░░░░░░░░░░░░▒░░░░░░░░░░░░░░░░░▒░░░▒░▒░░░▒░▒░░░░░░░░░░░░▒▒▓▓░░▒░ \n" + " ▒▒░░▓▓▓███▒░░▒░░░▒░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░ ░░░░░░░░░░░░░ ░░░░▒░\n" + " ░░░░░░ ░ ░░ ░ ░ ░ ░░░░░░░░░░░░░░░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓\n" + "░▒▒░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒\n" + " ▓▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒▓▒▓▒▓▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▒▓▓▓▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░ \n" + " ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒░░▒░▒░░░░░░░░░░░░░░░░░░░░ ░ ░░░ ░ \n" + " ░░ ░░░░░░░ ░ ░ ░ ░ ░ ░░░ ░░░ ░ ░░░░░░░░░ ░░░░░░▒░▒░░░░ ░ ░ \n" + " ░░░░▒░░░▒▒▒░░░░ ░ ░ ░ ░ ░░░ ░ ░ ░░░░░░▒▒▒▒▒▒▒░░░▒░░ \n" + " ░░▓▓░░▒░▒░░░░░░░░ ░░░░░░░░░░░░░░░░░░▒░░ \n" + " ░▓▓░░▒░░░░░░ ░ ░░░░░░░░░░░░░░░░░░░░▒▒▒░░░░░░░░░░░░░░░▒▒░ \n" + " ░▓▒░░▒░░░░░░░▒░▒░▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░░░░░░░░░░░▒▒░░ \n" + " ░▓▓░░░░░░░░▒░░░░░░░░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▓▓░░░░ \n" + " ░░░ ░▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░░ ░░░░ \n" + " ░░░░ ░▒▒▒▒▒▒▒▒▒▒▒░▒░░░░░░░░░░ ░ ░ ░░░░░░▒▒▒▒▒░ \n" + " ░░░░░░░ ░░░░░░░░░░▒░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▒▓▒▓▒▒▒▒▒░░ \n" + " ░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▓▒ \n" + " ░░░▒░░░░░░░░░░░░░░░░░░░░░▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░ \n" + " ░▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒▒▒░░░░░▒░░░░░░░░ " + "\nContornando o altar central esta nomeDoTio no chao, encharcado de sangue de um ferimento em seu peito e notaveis sinais\n" + "de tortura pelo corpo. Ele rastejava para fora de uma abertura no chao do templo que você sequer sabia da existencia\n" + "Seu amigo de longa data, foi o motivo de voce vir morar em nomeDaVila, salvando-o da perdição que se encontrava e dando\n" + "de certa maneira um novo motivo para continuar vivendo depois dos acontecimentos de seu passado. Ele balbucia algumas palavras\n" + "mas voce não entende, deve se aproximar para compreender melhor.\n" + "3-APROXIMAR-SE"); resp = e.nextInt(); while (resp != 3) { if (resp == 3) { break; } System.out.println("Numero invalido, tente novamente\n" + "3-APROXIMAR-SE"); resp = e.nextInt(); } System.out.println("Em seus bracos Henry reconhecendo sua feição começa a falar: - Eles levaram o fragmento do orbe que estavamos protegendo\n" + "por favor impeça que eles consigam utiliza-lo, por favor, eramos seus guardioes e agora o tomaram de nós\n" + "por favor o recupere e os impeça de ressuscitar nomeDoVilao. 
Você não entende muito bem a situação\n" + "Agora o unico clamor em seu pensamento é saber quem fez tal barbárie com todos e o questiona.\n" + "1-QUEM FEZ ISSO? 2-VOCE IRA FICAR BEM?"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { System.out.println("" + " \n" + " \n" + " ▒░ \n" + " ▒ \n" + " ▒ \n" + " ▒ ░▒▓▒░ \n" + " ▒ ░▓▒░▒███████▓▓░ ▒▓░▒▓▒ \n" + " ▒ ▒▓▓███▓▒▓▓▒▒▓▓▓████▓▓▓▓▓▓▓░ \n" + " ░▒▓▒▓▒ ░▒░▒▒▓▓▓▓█▓░▒░ ░░░░▒░░ ░░░ \n" + " ▒▓▓▒░▓█▓▒▓▒▓▓▓▒░ ▒░ ░░░░░▒░░ \n" + " ▒▓▒░░░▒▓▒▓▓▓▓▓▒░░▒▒░ ░░░░░░░░░░░░ \n" + " ▒▒▓▒▒▒▒▒▓▓▓▒▒░▓▓▒▒█▓▒ ░░░░░ ░░░░░░░░▒░░░▒░░ \n" + " ░▒▒▒▒░▒▒▒▒░░▒░▒████▓▒▒ ░░ ░░░░░░░░░░░░▒▓▒░░░ \n" + " ░▒░░▒░░░▓▓▓▓██▓▓▓▓▒▒░ ▒▓▒░░░░░░░░░░░░░░░░░░░▒▓▓█▓░░ \n" + " ▒███▒▒░▒▒▒▓██▓▓▓▓▓▓▒▒▒▒░▒▓▓░░░▒▒▓▒▒░░░░░░░ ▒▓▓█▓▒░ \n" + " ▓██▓▓░░▒▒▒▓▓▒▓▓▓▓▒▒▒▓▒░▒▒▒▒▒▒▒░▒░▒▒▒░▒▒▒ ░░▒▓▓▒▒░ \n" + " ▒▒▓▒▒░░░▒▒▒▒▒▓▒▒▒░▒▓█▒▓▒▒░░░░░░░░░░░░▒▒░▒▓ ░▒▓▒▒░░ \n" + " ▒▓███▒░ ░░░░▒▒▒▒▒▒█▓ ░▒▓▓▓▓▓▓▒▒▒░░░░▒░░░▓ ░▒▒▒░░ \n" + " ░▓█▓▓▓▓▒░ ▒▒ ░░▓█░ ░░░ ░▒░░▒▒▓▒░░▒░░ ▒▒ ░▒▒░░ \n" + " ▒▓███▓░ ░▓ ▒▓░▒▒▒▒▒▒▒░░ ░░▒▒▒▒░ ░▓▒ ▒▓▒▒▒▒░░ \n" + " ▒▓▓▓▒▒░ ▓▒ ▒▓▓▓▓▓▒▒░▒░░░▒░░░▒▒░ ░▒▒▒▒▒▒▒▒▒▒▒▒░ \n" + " █▒▒▓▓▓▓▒░░▒▒░ ░░▒▒▓▓▓▓▒▒▒▒▒░░▒░░░░ ░░░ ░ ░ ░▒▒▒▒▒▒▒▒░░░ \n" + " ▒▓▒▓▓▓▓███▓▒▒ ▓▓ ▒▒▒░▒▒▒▒▒▒░▒▒▒░ ░░░░░░░░░ ▒▒░░ \n" + " ▒▒▒▒▒▒▒▓▓▓▓▓▒▓█▓▓▒▒▓▒ ░░ ░▒▒░░▒▒░▒▒▒░░░░░░░░░░ \n" + " ░▒▒▓▓▓▓▓▒███▒▒░ ░░▒▒▒░░▒▒▒░░░░░▒░░░░░ \n" + " ▒▓▒▒▒▓████▓ ▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒░░░ \n" + " ▒▒ ░▓███▓ ░░░▒▒▒░▒░░░▒▓▓▓▓▒▒░░░░ \n" + " ░▓▒ ░ ░░░▒▒░░▒▓▓▓▓▒▒▒▒▒░░░░ \n" + " ░▓▓░ ░░ ░░▒▒▓▓▓▓▒▒░░░░ \n" + " ░▓▓▒ ░▒▓▓▓▒▒▒░░░░░ \n" + " ░▓▓▒ ░▒▒▒▒░░░▒▒▒░ \n" + " ▓▓▒ ░░░░▓▒▒▒▒▒░ \n" + " ▒▓▒░ ▒▓░▒▒░▒░ \n" + " ░▓▓░ ░░░▒░░▒ \n" + " ░▒▒░ ░▒░▒▒▒ \n" + " ░▒▒ ░░▒▒▒░ \n" + " ▓▓ ░░▒▒ \n" + " ░▒░ ░▒▒▒ \n" + " ░░ ▒▒▓ \n" + " " + "\nQuem fez isso com todos? -voce pergunta algumas vezes- nomeDoTiu esforça-se para lhe responder mas parece que\n" + "a vitalidade esta indo embora de seu velho corpo, esta batalha ele não vencerá… Com suas ultimas forcas ele responde que\n" + "foram os nomeDoBando, novamente pede para que voce os impeça, mas dessa vez termina a frase dizendo para voce pegar a arma\n" + "que esta dentro de uma porta secreta na porta secreta abaixo do templo, apesar da tortura eu nao lhes contei sobre esta\n" + "por favor a use bem, é a unica arma que pode derrotar quem o grupo nomeDoBando buscam, procure nos outros santuarios de \n" + "nomeDoSantuario por mais informações, por favor os impeça…. com estas ultmas palavras seus olhos perdem a vitalidade\n" + "ficando de um jeito estático desconfortante. 
Largando seu corpo calmamente voce observa a passagem aberta."); break; } if (resp == 2) { System.out.println("" + " \n" + " \n" + " ▒░ \n" + " ▒ \n" + " ▒ \n" + " ▒ ░▒▓▒░ \n" + " ▒ ░▓▒░▒███████▓▓░ ▒▓░▒▓▒ \n" + " ▒ ▒▓▓███▓▒▓▓▒▒▓▓▓████▓▓▓▓▓▓▓░ \n" + " ░▒▓▒▓▒ ░▒░▒▒▓▓▓▓█▓░▒░ ░░░░▒░░ ░░░ \n" + " ▒▓▓▒░▓█▓▒▓▒▓▓▓▒░ ▒░ ░░░░░▒░░ \n" + " ▒▓▒░░░▒▓▒▓▓▓▓▓▒░░▒▒░ ░░░░░░░░░░░░ \n" + " ▒▒▓▒▒▒▒▒▓▓▓▒▒░▓▓▒▒█▓▒ ░░░░░ ░░░░░░░░▒░░░▒░░ \n" + " ░▒▒▒▒░▒▒▒▒░░▒░▒████▓▒▒ ░░ ░░░░░░░░░░░░▒▓▒░░░ \n" + " ░▒░░▒░░░▓▓▓▓██▓▓▓▓▒▒░ ▒▓▒░░░░░░░░░░░░░░░░░░░▒▓▓█▓░░ \n" + " ▒███▒▒░▒▒▒▓██▓▓▓▓▓▓▒▒▒▒░▒▓▓░░░▒▒▓▒▒░░░░░░░ ▒▓▓█▓▒░ \n" + " ▓██▓▓░░▒▒▒▓▓▒▓▓▓▓▒▒▒▓▒░▒▒▒▒▒▒▒░▒░▒▒▒░▒▒▒ ░░▒▓▓▒▒░ \n" + " ▒▒▓▒▒░░░▒▒▒▒▒▓▒▒▒░▒▓█▒▓▒▒░░░░░░░░░░░░▒▒░▒▓ ░▒▓▒▒░░ \n" + " ▒▓███▒░ ░░░░▒▒▒▒▒▒█▓ ░▒▓▓▓▓▓▓▒▒▒░░░░▒░░░▓ ░▒▒▒░░ \n" + " ░▓█▓▓▓▓▒░ ▒▒ ░░▓█░ ░░░ ░▒░░▒▒▓▒░░▒░░ ▒▒ ░▒▒░░ \n" + " ▒▓███▓░ ░▓ ▒▓░▒▒▒▒▒▒▒░░ ░░▒▒▒▒░ ░▓▒ ▒▓▒▒▒▒░░ \n" + " ▒▓▓▓▒▒░ ▓▒ ▒▓▓▓▓▓▒▒░▒░░░▒░░░▒▒░ ░▒▒▒▒▒▒▒▒▒▒▒▒░ \n" + " █▒▒▓▓▓▓▒░░▒▒░ ░░▒▒▓▓▓▓▒▒▒▒▒░░▒░░░░ ░░░ ░ ░ ░▒▒▒▒▒▒▒▒░░░ \n" + " ▒▓▒▓▓▓▓███▓▒▒ ▓▓ ▒▒▒░▒▒▒▒▒▒░▒▒▒░ ░░░░░░░░░ ▒▒░░ \n" + " ▒▒▒▒▒▒▒▓▓▓▓▓▒▓█▓▓▒▒▓▒ ░░ ░▒▒░░▒▒░▒▒▒░░░░░░░░░░ \n" + " ░▒▒▓▓▓▓▓▒███▒▒░ ░░▒▒▒░░▒▒▒░░░░░▒░░░░░ \n" + " ▒▓▒▒▒▓████▓ ▒▒▒░▒▒▒▒▒░▒░░░▒▒▒▒░░░ \n" + " ▒▒ ░▓███▓ ░░░▒▒▒░▒░░░▒▓▓▓▓▒▒░░░░ \n" + " ░▓▒ ░ ░░░▒▒░░▒▓▓▓▓▒▒▒▒▒░░░░ \n" + " ░▓▓░ ░░ ░░▒▒▓▓▓▓▒▒░░░░ \n" + " ░▓▓▒ ░▒▓▓▓▒▒▒░░░░░ \n" + " ░▓▓▒ ░▒▒▒▒░░░▒▒▒░ \n" + " ▓▓▒ ░░░░▓▒▒▒▒▒░ \n" + " ▒▓▒░ ▒▓░▒▒░▒░ \n" + " ░▓▓░ ░░░▒░░▒ \n" + " ░▒▒░ ░▒░▒▒▒ \n" + " ░▒▒ ░░▒▒▒░ \n" + " ▓▓ ░░▒▒ \n" + " ░▒░ ░▒▒▒ \n" + " ░░ ▒▒▓ \n" + " " + "\nA vitalidade esta indo embora de seu velho corpo, esta batalha ele não vencerá… Com suas ultimas forcas ele responde que\n" + "foram os nomeDoBando, novamente pede para que voce os impeça, mas dessa vez termina a frase dizendo para voce pegar a arma\n" + "que esta dentro de uma porta secreta na porta secreta abaixo do templo, apesar da tortura eu nao lhes contei sobre esta\n" + "por favor a use bem, é a unica arma que pode derrotar quem o grupo nomeDoBando buscam, procure nos outros santuarios de \n" + "nomeDoSantuario por mais informações, por favor os impeça…. com estas ultmas palavras seus olhos perdem a vitalidade\n" + "ficando de um jeito estático desconfortante. Largando seu corpo calmamente voce observa a passagem aberta."); break; } System.out.println("Numero invalido, tente novamente\n" + "1-QUEM FEZ ISSO? 
2-VOCE IRA FICAR BEM?"); resp = e.nextInt(); } System.out.println("1-IR PELA PASSAGEM"); resp = e.nextInt(); while (resp != 1) { if (resp == 1) { break; } System.out.println("Numero invalido, tente novamente\n" + "1-IR PELA PASSAGEM"); resp = e.nextInt(); } System.out.println("" + "▒▒▒▓▓▒▒▓▒▓▓▓▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▓▒▒▒▒▓▓▓▓▓▓▓▓▒░ ░▓▓▓▓▓██▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓██▓▓▓▓▓▓ ░░▒▓▒▒▓▓▓█▒░▒▒▒▓▓▒▓▒▒▓▓▒▒▒▒▒▒░░▒▒▒▒▓▒▒▒▒▒▒░░ ▒▒▓▒▒▒▒▒░░ ░▒▒▓░▒▓▒▒▒▒▓░ ░ ░ ░▓▓▒▒▒░▒▒▒░░░▒▓▓▓█\n" + "▓▓░▒▒▒▒▒▒▒▒▒▓▓▓▓▓▒▓▒▒▒▒▒▒▒▓▓▓▓▓▓▓▒▒▒▒▒▓▓▓▓▒▒▓▓▓▓▓▒▒▒▒░▒█▓▓▓▓▓▒▓▓▓▓▓▓▓▒ ░▒▒▒▒▒▒▒▒▒▒▓▒▒▒▒ ▒▒▒▓█▓▒▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▒▒▒▓█▓░ ░▒▒▒▓▓▓▓▓▓▒▒▓▒▓▓░▒▓░▒▒▒▓▓░░▒░░▒▒▒▒▓▒▒▒▓▓▓▓▒▒▓▓▒▒▓▓▓▒▒▒\n" + "▒▓ ░▒░▒░░▒▒▒▒▒▒▒▒▒▒▒░▒▒▒▓▒░▒▒░░░▒▒▒░▒░▒▒▒▒▒▒▒▒░ ░▒░░░░░░▒░░▒▒▒▒▓▒▓▓▓▓ ░▓▓▒▒▒▒▒▒▒▒░▒▒ ░░▒▒▒░▒▓▒▓▓▒▒▒▓▒▒▓▓▒▒▒▒░░░░▒▓█ ▒▓▓▓▓▒▓██▓██ ░▒▒▒▒▒▒░▒▓▓▓▒▒▒▒░▒▒▒▒▒▒▒▓▓ ░▒▒▒▒▒▒░ ░\n" + " ▒▒ ▒▒▒▒░░░▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ░ ░░▒▒▒░▒▒▒▒▒▒▒▒▒▒▓▒ ░▒▒▒░░░░░░░ ░░▒▓▓▓░ ░▒▒▒▒▒▒▒▒▒░░░▒░░░ ░░░░░ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▒▒ ▓▒ ░░▒▒░░ ▒▓▒▒▒ ░░▒▒░▒▒▒▒▓▓▓▓▒░░░▒▒▒░░░░▓▒ ░░▒▒▒▒▓▒░▒\n" + " ▒▓▓ ░▒▒▒░░▒▒▒▒░▒▒▒░░▒░▒▓▓░ ▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▓░ ▒▓▓▒▒▒▒▒▒░▒▒▒░░░░ ░▒▓▓▓▓░ ▒▒▒▒▒▒▒▒▒▒░░▒▒░░░░▒░░▒▒▒▒░░▒░░▒░▒ ▒░▒▒▒░▒▒▒▒░▒█▒ ░▓▒▒▒▒▒░▒▒ ░ ░▒▒░ ░░▒ ░░▒▒▒▒░▒▒▒\n" + " ░▒▓▓ ░░ ░ ░░░░▒▒░░ ▒▒▒▒░▒░░░▒▒▒░▒▒▒░ ▒▓▓▓▓▒▒▒▒▒▒▓▒░░░░░░░▒▓▓▓░ ▒▓▒▒▒▒▒▒▒▒▒░▒░░░░▒░░░▒▒▒░░▒▒░▒░▒▒ ░░▒▒░▒▓▓▓▓▓▓█▒ ▓▓▓▓▓▓▓▒█▓ ░▓▒▓▓▓▓▓▓▓█░ ░░░░▒▓▓░ ░░░▒▒░░▒▒▒\n" + "░░░░░░▒▒░░░░ ░ ░░▒▒▒▒▓▓▓ ░▒▒▒░▒░▒▒▒░░░ ░▒▒▒▒▒░ ░░▒░ ░░░░ ░░ ░░░ ░▒▓▓▒▒▒▒▒▒░▒▒ ░▒▒▒▒▒▓▒▒▒▒▓▓▓▓▒▒▒▒▒▒▓▒░▒▓▓▒▓▒▒▓░ ░▒▒▒▒▒▒█▓ ░▓▓▓▓▒▓▓▓▓▓▓▓▒ ░░▒▒▓▓▓▓█▒ ░░░░░▒▒░▒▒▒\n" + "▒▒▓░ ░ ░▒▒▓▒░▒▒▒░▒▒▓▒▒░ ░▒▓▓▓▒░░░░░░░▒░▒▒░▒▒▒▒░░▒▒░ ░▒▒▒▒▒▒░░░░░░▒▒░ ░░▒▒▒░▒▒▒░░ ░░▒░ ░▒▒ ░ ░ ░░░░░▒░░ ▒▒▒▒▒▒ ░▓▓▓▓▒▒▓▓▓▒░▒▒▓▓ ░░▒▓▓██▓ ░ ░▒▒▒▒\n" + "▓███▓▓░░▒▒▒▒▒▒▒▒░▒▒▒▒▒░▒▒▓▓░ ░ ░░░▒░▒▒▒▒▒░▒▒▒▒░ ░░▒▒▒ ░▒▒▒▒▒▒▒░▒▒░▒░▓▒ ░▒▒▒▒▒▒▒▒▒▒▒▒░▒▒░░░ ░░ ▒▓▓▒▒▒▒▒▒▓▓▒▒▓▓▒▓▒▒▒▓▒▒▒▒▒█ ▒▒▒▒▒▒▓▒▒▒▒▒▒▒▒█▒ ░▒▒▓▒░ ░▒▒▒▒▒▒ ░░░\n" + "▓████▒░▒▒▒▒▒▒▒▒▒▒▒▒░▒░░░▒▒▓▓░ ░░░▒░▒▒▒▒▒▒░░▒▒▒▒▒▒░░░░▓░ ░░░░░▒▓▒▒▒▒▒▒░▒▒▒ ░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒░░░░▒▒░░ ▒▒▒▓▓▓▓▓▓▓▓▒▒▓▒░▒▓▒░▒▒▓▓▓▓▓▓▒░▒░░░▒▒▓▒▒▒▒▒▒▒▒▓▓ ▒▒▒▒▒▓▓▒▓▓▓█▓▓▓░ \n" + "▓▓██▒░▒▒ ░▒▒▒░▒▒▒░░░░░▒▒▒▓▒ ▒░ ░▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▒▒▒░░▒▒ ░▒▒░░ ░▒░░ ░░░ ░░░░░ ▒▒░░░▒▒▒▒▒ ░▒▒▒▒░▓▒▒▒▒▒▓▓▒▒░░▓▒▒▒▒▓▓▓▓▒▒▓░ ░░▒░░░▒▒░░▒░░ ░▒░▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▒ ░\n" + "▓███░░▒░ ▒▒▒▒▒▒░▒░▒░▒▒▒▒▓░ ▓░ ▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░ ░░▒▒ ░▒▒▒░▒▒▓▓▓▓▓█▓░▒▓▒▓▓▓▒░▒▒▒░░░▒▒░ ░░░▒░ ▒▒▒▒░▒▒░▒▒▓▓▓▒▓▒▒░▒▒▒▒▒▒▒▒▒▒▓░ ░▒▒▒▒▒░ ░▒▓▒▒▒▓██▓▓▓▓▓▓▓▓▒▓▓▒▒▓▒▒██ ░▒\n" + "░▓▒░ ▒▓▒░▒▒▒▒▒▒▒▒░▒▒ ▓ ░▒▒░░ ░░░░░░▒░ ░░░▒▒▒▒▒▒▒▓▒▒▒▓▒▒███▓▓▓▓▓▓▓▓▒▒▓▒▓▒▒▓█▓ ░▒▒▓▒ ░░▒▒▒▒▒▒▒░ ▓▓▓▒▓▒▒▒▓▒▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▓▒ ▒▒░░▒▒▒▒▓▓░ ▒▓▓▓▓█▓▓▓▒▓▓▓▓▓▒▒▓▓▒▓▒▒▒▓▓ ░░\n" + "▒▒▒▒▒▒▒▒░░░░░░▒▒▒░░▒▒▒▒▒░▒░ ░░░▒▒▒▒░▒ ░▒░▒▒▒▓▓▓▓▓▓▓▓▓▓▒░░▒▒▓▒█░░▓▓▓▓▓▓▓▓▓█▒░▓▓▓▒░▒▒▓▓ ░▓▓▓▓▓▓█▓██▓▓███▓ ▒▒▓▓▓▒▓▓▓▓▓▓▓▒▓▓▓▒▓▓▓▒▓▓▒▒▓░ ▒▒▒░░▒▒▒▓▓ ▒▒▓█▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▓▓▒▓▓▓ ░▒\n" + "▓▓▒▒▒▒▓▓▒▒▓█▒░▒▓▓▓▒▓▓▒▒▓▓▓▓▓▓▓▒▒▒▒▒▓█▓ ░▒▒▒░░░▒▒▓▓▓▓▓▓▓▒░░▒▒▓█░▒▓▒▓▓▓▓▓▒▓█▓░▓▓▒▓▓▒▒▒█▒▒▒▓▓▓▓▓▓▓▒▓▒▓▒▒▓██ ░░▒▒▓▓▓▓▓▓▓▓█▓▓▒▓▓▓▒▓██▒▓░ ░░▒▒▒▒ ▒▒▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓▒▓▓▓▒▓█▒ ░▒\n" + "▒▒░▒▒▒▒▒▒▓▓▒▒▓▒▒▒▒▒▒▒▒▒▒▓▒▓▒▒▒▒▒▒▒▒▒▓█ ░▒▒▒░░░░▓▓▓▓▒▒▒▓▒▓▒▒▒▓▓▓ ▓▓▒▒▓▓▓▒▓▓▓▓▓▒▓▓▓▓▓▒▓▓▓▒░▒▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓ ░░ ░▒▒▒▒▒▒▒▒▒░ ░░ ░░ ░░░░░░ ░░▒░▒▒ ░▒░▒▒▒▒▓▓▓▓▓▓▒▒▓▓▓▒▒▒▒▓▒▓ \n" + "▒▓▒▒▒▒▒▓▒▓▒▓▒▒▒▒▒▒▒▒▒▓▒▒▒▒▒░░▒▒▒░▒░░▒█ ▒▒▒▒▒▒░░▒▓█▓▓▒▓▓▓▓▓▓▓▓█▓ ░▒▒░▒▒▒▒▓▓▓▓██▓▓▓▓█▓▓█ ░░▒▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓ ░ ░░░░▒ ░ ░ ░▓▒▓▓███▓▓▓██▓█▓ ░▒▒▒▓▒▒▒▓▓▓▓▓▒▓▓▓▓▓▓▒▒▒▓▒ ░▒░\n" + "▒▓▓▒▓▓▓▓▒▒▒▒▓▒▒▒▒▒▒▒▒▒▒░▒▒░░░▒▒░▒░░░▒▓ ░ ░▒░▒░░░ ░▒▒░▒▓▓▓▓▒░▒░▒░░▒░░▒▒░░▒▓▓▓▒▒▓▓▓▓ ▒▒▓▓▓▒▓▓▓▓▓▒░▒▒▓▒ ▒▒▒ ▒▒░▒▒▒▒▒▓▓▓▓█▓ ░▓█▓▒▒▓█▓▓▓▓▓▓▓▓█░ ▒▒▓▓▓▒▒▒▓█▓▓▓▓▓▓▓▓▒░▒▒▓ ▒▓█\n" + "░▓▒▒▒▒░░░▒▒▒▒▒▒▒▒▒ ░▒▒▒▓ ░▒▒▒░ ░▒▓ ░▓▓▒▓▓▓▒ ░ ░▒▒▒▒▒▒▒▒ ░░░▒░ ▒▓▓▒▒▓█▓▓▓▓▒▒▓▓ ▒░▒░ ▒▒░░▒▒▒░▒░░▒▓█░ ░▒▒▓▓░▓▓▓▒▓▓▓▓▓▓██▒ ▒▒▒▒▒▒▒▒▓█▓▓▒▒░▒▒▒▓▒▒ ░▒▒▓\n" + " ░░░░▒▒░ ░▒▒▒ ░ ░░░░░ ░▒▒░▒░▒▓▓▒░▓▓█░ ▒▒▓▓▓▓█▓ ▒▒▒▒▒▒▒░ 
░▒▒▒▒▓▓▓▒▒▒▓▒▓▓▓▓▒ ▒▓▒▒▒██▓▓▓▒▒▒░░ ░ ▒▒▒░▓▒░▒▒░░▒░░▒▒▓░ ░▒▒▒░░▒▓▓▓▓▓▒▒▒███▓ ░ ░░░ ░▒▒▒░░░ ░▒▒▒\n" + "▓▓▓▓▓▓███▓▒▒▒░ ░▒▒▒▓░▒░ ░░░░░░░░░▒▓▒░▒▒ ▒▒▒▓▒▓▓▒▓▓▒▓█▓ ▓▓▓▓▓▓▓▓▓▒▓▒▒▒▒▒▓▒▒▒▒▒▒▒▒▓▓▓▓▒ ░░░▒▒░░░░░ ░▒▒▒▒▒▒▒▒░▒▒▒░░▒░ ░▒░░░▒▓▓▓▓▓▒▒▒██▓ ░▓▓█▓░░▒░ ░▒▒▓▓▓▓▓▒▒ ░\n" + "▓▓▓▓▓▓▓▓█████░░░░░░▒▓▓▒▓▒ ░░░░░░░▒▒░▒▒▒▒▓▒ ░░░░▒▓▒▓▓▓▒▓▓ ░▓▓▒▓▒▒▓▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒▒▓█░ ░▒▓▒▓▒▒▒▒▒▒▒▓▒▓▒░ ░▒▒▒▒▒▒▒▒▒ ░▓▓▓▒░ ░▒▓▓▓▓▓▓██░▒▓▓▓▓▓▓▓▓█▓ ░▓▒░ ░▓▓▓▓▓▓▓▓▓▒▒▒\n" + "▓▒▓▒▒▓▓▒▒▓▓▓▓░░░ ░░▒▒▓▓▒ ░░░ ░░░▒▒▒▒░▒▒▓█▓ ░░░▒▒▒▒ ▒▓▒▓▓▓▓▓▒▓▓ ░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▓▒▒░░░░▒▓▓▓▓▓▓▒░▒░░▒▓▓▓▓▓▓▒░ ░▒▒░ ░▒▒▓▓▓▓▒▒▒▒░ ░▒█▓░ ▒█▓▓▓▓▓▓▒▓▓▓ ░▒▒░░░░▓▓▓▓▓▓▓▓▓▓▓\n" + "▓▓▓▒▒▒▓▒▒▓▓░▒▒░ ▒▒▒░ ░░ ░░▒▒▒▒▒░▒▒▒▒▒░▒▒▓▓█▓▒▓▓▒░ ░░░░▒▒▓▒▒▒ ░▒▒▒▒▓▒▓▓▓▓▓▓▒░ ░░▒░░▒▓▓░▓▓▒▓▒▒ ░▒▓▒▒▒▒▓▓░ ░░▒▒▒▒▒▒▒▒▒▒▓▓▓▒ ▒▒▓▓▒▓▓▓▓▓▒▓▓▓▒ ░▒▒▒░░▒▒▒▓▓▓▓▓▓▓▒▒\n" + "▒▒▒▓▓▓▓▓██▓░░ ░░ ░░░▒░ ░ ░ ░▓░ ▒▓▒▒▒▒▓▓▓▓▓▒▓▓▓▓█▒▒ ░░▒ ░▒▒░░▒▒▒▓▓▒░░░ ░░░░░░▒▓▓▒▒▒░▓▓▒░░░▒▒▒▒▓▓█▓ ░▒▒░▒▒▓▓▒▒▒▒▒▒▒▒▒▓▒ ░▒▒▒▓▓▒▒▒▒▒▓▒▓▓▓█▒ ░▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▒\n" + "░░░░░░▒░░░ ░▒▒▓▓▒░▒▒▒▓▓▓▒ ░▒▒ ░▓▓▓ ░▒▒▒▒▓▓▒▓▓▓▓▓▓▓▒▒▓▓▓▓▒ ░▓▓▒▒▒▒▒▒░▒ ░▒░▒▒░░░▒▒▒▒▒▓▓▓▒░░░▒▒▒░ ░ ▒░░ ▒▒▓▒▒▒▒▒▓▒▒▒▒▒▒░░▒▓▓ ░▒▒▒▒▒▓▓▒▒▒░▒▓▓▒▒▒█▓ ░▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓\n" + " ▒▒▒▓▓▒ ░▒▒▒▓▒▒▒▒▒▓▓▓▓██▓███▓░░▒▒ ░ ░▒▒▒▒▒▓█▓▓▓▓▓▓▓▓▒▒▓▓█▒ ░░░▓▓▓▓▓▓▓▓▓▓▓ ░▒░░ ░░░░░░░ ░░░▒▒░▒▓▒▒▒ ░▒▓▓▒░▒▒▒▒▓▒▒▒▒▒▒▓░░▒▒▒▒▒▒▒▓▒▒▓▒▒░▒▒▒▒░░ ░░▒▓▒▓▓▓▓▓▓\n" + "▒▒▒▒▒▓▒▓██ ░▒░▒▒▒▒▒░▒▒▓▓▒▓▓▓▓▓█▓███▓▓▒░░░ ░▒▒▒░▒▓▓▓███▓▓█▓▓▓█░ ░░░░██▒▓▓▒▓▓▓▓█░ ░░▓▓░░▒▒▓▓ ░█▓▓▓▓█▓ ░▓▓▓▓▓▒░▒▒▒▒▓▓▒ ░▒▒▒▒▒▒▒▒▓▓▓░░ ▒▓▒▒▒▓▒▒▓▒░▒▒░ ░░▒▓▒▒░▒ ░ ░ ░░ ░░\n" + "▒▓▓▒▒▒▒▒█▓ ▒▒▒▒▒░▒▓▓▒░▒▒▓▓▓▓▓▓▓▓▓▓██▓███▓░ ░▒▒▒▒▒▒▒▓▒▒░▒░░░▒ ░▒▒▓▒▒▒▓▓▓▓▓▓░ ▒▓████▓▓▓▓▓░▒▓▓▓▓▓██░ ▒▒▓▒▒▒▒▓▒▒▒▒▒▓▓▒ ░░░░░░░▒░ ░▒░ ░▒▒▒▓▒▓▓▓▓█▓█▓ ░ ▒▒▓▓▒░░ \n" + "▒▓▓▒▒▒▒▒█▒ ▒▒▒▒░▒▓▓▒░▒▒▓▒▓▓▓▓▓▓▓▓▓▓█▓▓█████▓ ░░░░░░░░▒▒▒▒░ ░▒▒░░▒▒▓▓▓ ░▒▓▓▓▓▓▓▓▒▓▓▓▓▒▒▒▓▓▓▓░ ▒▒▒▒▒▒▒▒▒▒▒▒▒░░ ░░░░░░▒▒▒ ░░░░░░░▒░ ▒▓▓▓▓▓▓▒░▒▓▓▒▒▓▓▓░▒░ ░░ ▒░ ░░░\n" + "▒▓▓▒▒▒▒▒█▓ ░▒▒▒▒░▒▒▓▒▒▒▒▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▒░▓▓░ ▓▓▓▓▓██▓▓▓▓▓▓▓▓▓▓░ ░▒▒▒▒░ ▒▓▓▓▓▒▒▒▒▓▓▓█▓▓▓▓▓▓█▒ ░░░░░░▒▒▒▒▒▓▓▓▓▓▓▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ ░▒░▒▓▓▓▓▓▓▓▓▓▒▒░▓█▒▒▒░▒▓███ ░ ░\n" + "▒▒▓▒▒░▒▓█▓ ░▒▒▒░▒▒▓▓▓▓▒▒▒▒▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓██▓██░▓▓█▓▓▓▓▓█▓▓▓▓▓▓▓████ ▒ ░▒▒▒▒▒▒▓▒░░░▒▒▒▒▒▓▓█▓▓▓▓█▓ ░░░▒░░░░▒▒░░▓▒▒▓▓▓▒░▒▒▒▒▒▒▒▓▓▓▒▓▓▒▒▓▓█▓ ▒░▒▒░▓▓▓█▓▒▓▒▓▓██ ▒▒▓▒▓██▒ ░░▒▒\n" + "░░░░░░ ░ ░▒▒▒▒▒▓▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓█▓█▒▓░▒▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▓▓███ ▒▒ ░░ ░▒▒▒▒▒▒░░░░▒░░░▒▒▒▒▒▒▓░ ░░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▒▒▓▓▒▒▒▓▓▓▒▒▓▓▓▓▓▓█░ ▒▒░░ ▓█▓▓▓▒▓▓▓▓█▒░░▒▓▒██▓ ░▒▒▒░\n" + "▒▓▒▓▓▓▓▒ ▒▒▒▒▓▓▓▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ ░░▒▓▓▓▓▓▒▒▒▓▒▒▓▓▓▓▓███░ ░▒▒▒░░▒▒▒▒░░▒▒▒▒▒▓▒▒░▒░▒▒▒▓▓ ░░░░░░░░░░▒▒▒▒▒▒▒▓▒▒▓▒▒▒▒▓▓▒▒▓▒▓▓▓▓▓▓▓█▒░▒░░░░▒██▓▓▒▒▒▓█░░▓▒ ▒███▒ ░▒▒▒▒░\n" + "▒▓░▒▒▒▓█▓░ ░▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓ ▒▒▒▒░░▒▓▒░░▒▒▓▒▓▒▒▓██▓ ░░░▒▒░▒▒▒▒░▒▒▒▒▒▒▒▒▒▒░▒▒▒▒▒▒▓▒ ░░░░░▒▒▒▒▒▒░░░░░░░░░░ ░▒▒▒▒▒▓▓▓▓▒▓▓▓█▒░░░░░░░▓▓▓▓▓▓▓█░ ▒█▓▓▓███░░░▒▒▒░▒\n" + "▓▒▒░▒░▒▒▓▓░░ ░░▒▒▒▒▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▒▒▓▓▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ ░▒░░░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▓▒ ░ ░░░░░░░░░▒▒▒░░░░░░░░░░░░▒▒▒▒▒▓▓▓▓▓▒ ▒▒░▒░░ ▓▓▒██▓▓░ ░▒▓██▓ ░▒▒▒░▒▒\n" + "▓▒▒░▒▒░▒▓▓▒ ░░▒░▒▒▒▒▒▒░░░░░ ░ ▒▒▒▒▒▒░░▒▒░░░▒▒▒▒▒▒░ ░░▒▒▒▒░░▒░░▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒░░▓▓ ░ ░░▒▒▒░░░▒░▒░▒░▒░░░░░░░░ ░░▒░░░░▓▓▒▒░ ░▒ ░░▒░▒▒░▒\n" + "▒▓▒▒▓▒▒▓█░ ░▒▒▒▓▒▒▒▒░▒░ ░░ ░░░ ░░ ░ ░░ ░▒░▒▒▒▒▒░░▒░░░░░▒▒░░▒▒▒▒▒▒▒▒▓▓ ▒▒▒▒▒▒▒▒▒░▒▓▒ ░░░░░░ ░░ ░░▒▓▒ ▒▒▒▒▓▓▒▒▒▒▒░ ░░░░░░░░▒\n" + "▒▓▒░▒▒▒▓░ ░ ░░░ ░ ░░░░ ▓▓▒ ▓▓▒▒█▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▒▒░░░ ░░▒▒▒▒▒▒▒░▒░░▒▒░▒░▒▒▒▒▒▒▒░▒▒▒▒▒▒▒░░░▒▒▒▒▓▓░ ░░▒▒▓▓▓▒▓▓█▒▒▒▒▒▒▓▓██▓ ░▒▒▒▓▓▓▓▓▒▒▓░ ░░░░░░░░▒\n" + "▒▒▓▒▒▒▒ ░▓▓▓▓█▓▓▓▒▒░▒░▒░▒▒░ ░░ ░▓▓░▓▒▓▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▒▓▓▒░ ░░░▒▒▒▒▒░▒▒▒▒▒░░░░░ ░░ ░░░░▒▒▒▓▓▓█▓███████████ ░░▒░▒▓▓▓▒▒▒▒▓▒▒▒░░▒▒▒▒▒▒▓▓▓▓▓▓▓▒ ▒▒ ░░░░░░░░░▒" + "\ndescendo a pequena escada voce avista o pequeno altar ao fundo, onde provavelmente estava o fragmento do orbe.\n" + "Observando a pequena sala, com 
suas paredes de pedra com varias inscricoes em alguma linguagem\n" + "desconhecida a voce, como o tio Jorge lhe disse voce procura pela pedra que abre a tal porta secreta\n" + "1-EMPURRAR A PEDRA AO NORTE 2-EMPURRAR A PEDRA AO SUL 3-EMPURRAR A PEDRA AO OESTE 4-EMPURRAR A PEDRA AO LESTE"); resp = e.nextInt(); while (resp != 3) { Som.emppedra(); if (resp == 3) { System.out.println("Voce percebe que esta pedra pode ser empurrada, a pressionando algumas outras pedras abrem\n" + "mostrando uma pequena adaga com varias inscricoes runicas e um cabo transparente de vidro, ali dentro\n" + "parecia haver alguma substancia liquida que parecia mexer-se sozinha."); break; } System.out.println("Não é essa pedra, tente novamente\n" + "1-EMPURRAR A PEDRA AO NORTE 2-EMPURRAR A PEDRA AO SUL 3-EMPURRAR A PEDRA AO OESTE 4-EMPURRAR A PEDRA AO LESTE"); resp = e.nextInt(); } fase3(); return 0; } public void fase3() throws Exception { Scanner e = new Scanner(System.in); System.out.println("2-SACAR SUA ADAGA"); int resp = e.nextInt(); if (resp == 2) { System.out.println("" + "███████████████████████████████████████████████████████████████▓█████\n" + "██████████████████████████████████████████████████████████████▓▒▒▓███\n" + "████████████████████████████████████████████████████████████▓░▓▓ ▒██\n" + "███████████████████████████████████████████████████████████▓░▓▒ ▒███\n" + "█████████████████████████████████████████████████████████▓▓▓▓░ █████\n" + "███████████████████████████████████████████████████████▓▒▓▓▒░ ▓██████\n" + "████████████████████████████████████████████████████▓▒▒▒▒▒░ ▓███████\n" + "██████████████████████████████████████████████████▒▒▒▓▓█▒░ ░▓████████\n" + "████████████████████████████████████████████████▓▒▒▓▓██▒░ ░▓█████████\n" + "███████████████████████████████████████████████▒▒▓▓██▓▒░ ░▓██████████\n" + "██████████████████████████████████████████████▒▒▓███▓▒ ░░▓███████████\n" + "█████████████████████████████████████████████▓▒▓███▒░░░░▓████████████\n" + "████████████████████████████████████▓███████▒░███▓░ ▓█████████████\n" + "█████████████████████████████████▓▓▓▓▓█████▒░▒██▒ ▒███████████████\n" + "████████████████████████████████▓▓████▓▒▒▒░ ▒▓▓░ ▓█████████████████\n" + "██████████████████████████████████████▒▒▓▒▒▒▒░▒░░░███████████████████\n" + "█████████████████████████████████████▓▒▓▒▒▒▒▒▒░░ ░▓▓██▓▒▒▓██████████\n" + "████████████████████████████████████▓▒▒▒░▒▒░▒▓▓░░▒ ░▒▒▒▓████████████\n" + "████████████████████████████████████▓▓▒▒▒▓▒▒▓▓▓████▓▓████████████████\n" + "████████████████████████████████████▒▓▓▒▓▒░▒▒▓███████████████████████\n" + "██████████████████████████████████▓▓▓▒▓▓▓▓░▓▒▓███████████████████████\n" + "█████████████████████████████████▒▓▓▒▒▓▓▒ ▒▒▒▓███████████████████████\n" + "████████████████████████████████▒▓▓▒▓▓▓▒ ▓▓▓█████████████████████████\n" + "███████████████████████████████▓▒█▒▒▒▓▒ ▓████████████████████████████\n" + "██████████████████████████████▓▓█▓▒▒▓▓ ░█████████████████████████████\n" + "█████████████████████████████▓▓█▓▓▓▓█▓ ▓█████████████████████████████\n" + "████████████████████████████▓▓█▓▓▓▓▓ ▓██████████████████████████████\n" + "███████████████████████████▓▓▓▓▓▓▓ ▒████████████████████████████████\n" + "█████████████████████████▓▓▓▒▓▓▓ ▒██████████████████████████████████\n" + "███████████████████████▓▓▓▓▒▓▓▒ ▒████████████████████████████████████\n" + "██████████████████████▓▓▓▓▓▓▒▒▒██████████████████████████████████████\n" + "████████████████████▓▓▓▓▓▓▒░▓████████████████████████████████████████\n" + 
"██████████████████▓▓▓▓▓▓▒▒▓██████████████████████████████████████████\n" + "█████████████████▓▓▓▓▓▒▓█████████████████████████████████████████████\n" + "██████████████▓▓▓▒▒▒▓▓███████████████████████████████████████████████\n" + "█████████████▓▒▒▒▒▓██████████████████████████████████████████████████\n" + "███████████▓▓▓▒▓█████████████████████████████████████████████████████\n" + "█████████▓▓▓▓▓███████████████████████████████████████████████████████\n" + "██████▓▓▓▓███████████████████████████████████████████████████████████\n" + "█████████████████████████████████████████████████████████████████████" + "\nCom a adaga em mãos voce com toda a sua raiva e desejo de vinganca decide partir em busca de respostas\n" + "e sangue por parte dos agressores. Achava que o passado de lutas e tragédias tinha ficado para tras\n" + "mas novamente ele bate a minha porta, pensa voce. Está na hora de voltar a ser quem eu era.\n" + "Com esta frase dita em sussurros para si mesmo, voce pensa em seus antigos armamentos, escondidos\n" + "em um bau no porão da sua casa, voltando por todo o caminho de destruição já visto. "); } System.out.println("1-SAIR DO ALTAR SECRETO"); resp = e.nextInt(); switch (resp) { case 1: { System.out.println("Novamente dentro do templo voce observa os corpos, nada mais importa, somente a vinganca, uma ultima olhada\n" + "em seu mais fiel amigo nomeDoTiu, voce lembra de suas palavras e agora passando por toda a carnificina\n" + "encontra-se no meio do templo, encarando a saida e as chamas do lado posterior."); } } System.out.println("1-SAIR DO TEMPLO 2-SAIR DA VILA"); resp = e.nextInt(); if (resp == 1) { System.out.println("Apenas passando por toda a destruição voce segue seu caminho. nada mais importa, apenas a vinganca."); } if (resp == 2) { System.out.println("Caminhando com os passos pesados de furia voce dirige-se para sua velha casa, a pequena trilha que\n" + "voce sempre passou para visitar seus amigos agora so lhe tras as visões de seus corpos estirados ao chão\n" + "sem vida. Ao longe voce avista sua casa, mais lembrancas surgem, tudo ali de algma forma teve algum\n" + "envolvimento com essas pessoas que lhe acolheram tão bem, e agora já não existem mais. 
"); } System.out.println("" + "" + " \n" + " ░ \n" + " ░ ░▒ \n" + " ░░ ▒░ \n" + " ▒ ░ \n" + " ▓ ▒▒ \n" + " ▒█▓▓█▓ \n" + " ░▒░░░▓█ ░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒▒▒█▓ ░░ \n" + " ▒▒░░█▓ ░ \n" + " ░▒▓▓▓▓▓▓▓▒█▓ ▒ \n" + " ▒▓████████████▓░ ▒█▓░ \n" + " ░███▓▓▓▓███████████▓▒░░ ▒▒▓▓ \n" + " ▓█▓▓▓████▓█▓███▓███████████▓░ ▒▒▓▒ \n" + " ░█▓ ▒▓███▓▓▓█▓▓▓█▓███████████ ▒▒▓▒ \n" + " ░█▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███████▓ ░▒▓█▓███▓ \n" + " ░█▒ ▒▓▓▓▒▒░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓█▓████▓ \n" + " ░█░ ░▒▓█▓▓█▒ ▒█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████░ \n" + " ░█▒ ░░ ▒████▒ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓████▓ \n" + " █▒ ░░░▒▓▓██▒ ░ ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▓ \n" + " ▓▓ ░░▒░░▒▓▓▓▓░░░░ ▒▓▓▓▓▓▓▓▓▓▓█▓██▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████ \n" + " ▓▓ ░▒▒▒░░▒░░░░░▒░░ ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓███▓██ \n" + " ▓█ ▒▒▒▒░░░░░░░░▒▒▒▒░ ▓▓▓▓▓▓▓▓▓▓▓▓▓█▓███▓▓▓▓▓▓▓▓▓▓▓▓▓█▓ ██ \n" + " ▒█░ ░▒▒▒▒░ ░▒░░░ ▒▒▒▒░ ░▓▓▓▓▓▓▓▓▓▓▓███████▓▓▓▓▓▓▓▓▓▓▓██░ ░ ██ \n" + " ▓▒ ░░▒░░░▒▒▒▒▒▒▒▒▓░░░░░░ ░▓▓▓▓▓▓▓█████▓░░▒▓███▓▓▓▓▓████▓▒▒▒▒ █▓ \n" + " ░ ░░▒▒▒░░█▓▓██████▒ ▒▒░░░▒░▒ ▒███░░░ ░▒▒ ▒▓░████▓▒▒▓▓▒███▒▒░▒█ \n" + " ▒▓░░▒▒░▓▓▓▓▓▓▓▓█▒░▒░░▒░▓░ ░██▓ ░ ░██░ ░ ████ ░▓▒▓█▓▓▓▓ \n" + " ▒▓▒░░▒░▓▒▓▓▓▓███▒░░▒▓▒▒▓░ ░▓▓▓░░░░░██▒░ ░░██▓▓░░░▒▓▓▓▓▓▓▓▒ \n" + " ▒▒▒▒░░░█▓▒▓▓████▒ ▒▒▒▒░▓░ ░▓▓▓░░▒▒░▓█░░░░░▒██▓▓░▒░▒▓▓▒▓▓▒▓▓ \n" + " ░▒▒▒▒░▒▒▓▒▓▓▓▓▓▒░░▒░▒▒▒▓▓▓█▓▓▒▒░░▒▒░░░░░░░░░░░▒▓▒▒░░▓▒▓▓▓▓▓█▒ \n" + " ░▓▒▓▓▒▓▓▒▓█▓▒▓▓▓▒░ ▒▒▒▓▒▓▓▓▓██▓▒ ▒▒▒░░░▒ ░░░▒▓▓▓▓███▓▓███████░ \n" + " ░▒▓▓▒▓▓▒▒▒░▒▒░░░▒▒▓▓▒▒▓▒▒▒▒░░▒▒▓▒░░▒▒░░░░░░▒█▓▒▒▒▒▒▒▓▒▓█▓▒▓▓▓████▓░ \n" + " ▓▓▓▒░▒▒░░▒▒░░░░ ░▒░▒▓▒▒▒▒▓▒▒ ░░▒▓▓▓▒▒▒░░░░░▓▓▓▒▒▓▒▒▒░▒▒▒▒░░ ▒░▒▓██▓ \n" + " ░▒▒░░▒▓▓▒▒▒▒▒░ ░░░▒▒░ ░▒▓▒░░░ ░▒▒▒▒░▒▓▓▒▒▒▒▒▓▒ ░░ ░ \n" + " ░ ░ " + "\nCaminhando com os passos pesados de furia voce dirige-se para sua velha casa, a pequena trilha que\n" + "voce sempre passou para visitar seus amigos agora so lhe tras as visões de seus corpos estirados ao chão\n" + "sem vida. Ao longe voce avista sua casa, mais lembrancas surgem, tudo ali de algma forma teve algum\n" + "envolvimento com essas pessoas que lhe acolheram tão bem, e agora já não existem mais. 
"); System.out.println("1-ENTRAR NA CASA"); resp = e.nextInt(); if (resp == 1) { Som.porta(); System.out.println("Adentrando a residencia voce dirige-se para o alcapao sob a mesa da sala.\n" + "Voce move a mesa, e vizualiza um alcapao trancado por um velho cadeado."); } System.out.println("2-DESTRANCAR"); resp = e.nextInt(); if (resp == 2) { Som.cadeado(); System.out.println("" + " \n" + " ░▓▓▓▓▓▓▓▓▓▓▒ ░▒▒▒▒▒░░░░░░░ \n" + " ░█▓▓▓▓▓▓▓▓█▒ ▓▓▓▓▓▓▓▓█▓██▒░▓▓██▓▓▓▓▓▓▓▓░\n" + " ░▓▓▓▓▓▓▒▓▒▓▒▒▓▓▒▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▒▓▒▓▓▒▒▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ░▓▓▓▓▓▓▒▓▒▓▒▒▓▓▒▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▒▓▓▓▓▒ \n" + " ░▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▓▓▒▓▓▒▒▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▒▓▓▓▒▒▓▓▓▒▒▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▒▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▒▒▒▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▒▓▒▒▓▒▓▒▒▓▒▓▒▓▒▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▒▓▒▓▓▒▓▓▒▓▒▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▒▒▓▓▒▓▒▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▓▒▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ▒▓▒▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▓▒▓▒▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▒▓▓▒▓▓▒▓▒▒▓▒▓▒▓▓▒▓▒▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▒▓▒▓▓▓▓▒▒▓▒▓▒▓▓▒▓▒▒▓▒▒▓▓▒▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▒▓▒▒▓▒▒▓▓▓▒▓▒▓▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▒▓▓▓▓▒▒▓▓▓▒▓▓▓▓▓▒▓▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▒▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ░░░░░░▒▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▒ \n" + " ░░▓▒▒▒▒▒▒▒▒▒▒▒▒░░░░░░░░░░ ░ \n" + " ▒▒ ░░░░░░░░░▒▒▒▒▒▒▒▒▒▒▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ \n" + " ▒▒ ░░░░░ ░ ░░░░ \n" + " ▒▒ ▒▒▒▒▒▒▒▒▒▒▒▒▒░▒░▒░▒░░░░░░░░░░░░░░░░░ \n" + " ▒▒ ▒▒▒░▒░▒░▒░░░▒░░░░░░░░░░░░░░░░░░░ ░▒▒░ \n" + " ▒▒ ▒▒▒░▒░▒░▒░░░▒▒▒░░░░░░░░ ░░ ░▒░ ░ \n" + " ▒▒ ░▒▒▒░▒▒▒░▒░▒░▒ ░░░░▒░ ▒▓▓▓▒ ▒▓█▒ \n" + " ▒░ ░▒▒▒▒▒░▒░▒░▒░░ ░▒░ ░▓███████▓▒░░░ \n" + " ▓░ ░▒▒▒▒▒▒▒░▒▒▒░ ▓█████▓▓▒░░ ░▒▒▓▓███ \n" + " ░▓ ░▒▒ ░░░▒░ ▒███▓█▓███████▓▓▒░ ▒░ \n" + " ░▓ ░▒░ ▒▓▒▒ ▓▓▓▓▓▓█▓█▓▓▓▓████████▓▓ \n" + " ░▓ ▒▒ ▓████████▓▓▒░ ░▒▒▓▓▓█▓█▓█▓███▓ \n" + " ▒▓ ▒▒ █████████████████▓▓▒▒░ ░▒▒▓▓█▓ \n" + " ░▒ ▒▒ ▒██████████████████████████▓▓▒▒░ ░ \n" + " ▒▒▒▒▒▓▓▓▓██████████████████████████ \n" + " ░░▒▒▒▓▓▓▓████████████▒ \n" + " ░░▒▒░ " + "\nUsando a chave pendurada em seu pescoço, voce abre o cadeado do alcapao"); } System.out.println("3-ABRIR"); resp = e.nextInt(); if (resp == 3) { System.out.println("Dentro ha um velho baú, dentro dele ha algo que voce jurou nunca mais usar\n" + "mas desta vez era diferente, as mesmas pessoas que o fizeram realizar este juramento\n" + "já não estao neste mundo, voce não sente qualquer remorso em quebrar esta promessa\n" + "e leva suas maos nele."); } System.out.println("4-ABRIR BAU"); resp = e.nextInt(); if (resp == 4) { Som.itemLendario(); System.out.println("Dentro estão fotos de seu passado que você não gostaria de ter revisto, e alguns itens lendarios."); } System.out.println(""); System.out.println("0-EQUIPAR ARMADURA"); resp = e.nextInt(); if (resp == 0) { System.out.println("" + "██████████████████████████████████████████████████████████████████\n" + "██████████████████████████████████████████████████████████████████\n" + "████████████████████████████████████████████▓▓▒▓██████████████████\n" + "█████████████▓▒▒▒▒▓▓▓▒░░░░▒▓████████████▓▓▒░ ░▒▒▒▓▓███████████\n" + "█████████▓▓▓▒▓▓▓▓█▒ ▒██████████▓▓░▒▒▒▓▒▒▓▒░░░ ░▓████████\n" + "███████▓▓██▒████▓ ░▒▒░ ░░ ▒███████▒▒▓░▓█▓█▒▓███▓▓▓▓▒▒░ ▓█████\n" + "██████▓▓██░▓██▓▓ ░▓█▒ ░ ░▓███▓▒▒▓▒▓███▓▒███████▓▓▓▓▒ ▒████\n" + "█████▓▒▓█▓▓█▓▒▓░ ▓▓▒░ ░▒░ ▒▓▓▒░░▒█░▓▓▓▓ ▒▓▓▓▓▓▓▒▒▓▓▓▒▒░ ▓███\n" + "████▓░░░░▒▒░░░█ ▒▒▒░▓░ ▒░ ▓▓░▒▒▒▒▒█░░▒▒░ ▒░░░░░░░▓▒▒▒▒▒▒░ ░▒█\n" + "███▓░▒▒░ ░ 
▒█ ▓▓▒░█░ ░░ ░▓▒ ░▓▓▒▒░█▒ ░░▒▒▓▓▓▓▓▓▓▓▒░ ░░░░ ▒█\n" + "████▒▒▒▒▒▒▒▒▒█▒░▓▓▓▒░▓▓▓▓▓▒▓▓▒▒▓▒▒▓▒ ▒██████▓▓▓▒░ ░▓▓███████\n" + "███████████▓▓▓░▓█▓▒▓▒▓▓▒▓▓▓▓▓▓█▓▓▓▓░ ░▒▒░ ▒▓█████████████\n" + "██████████████▒▓██████▓▒██████▓▓▒▓▒░ ▓░ ░ ▓██████████████\n" + "█████████████▓ ▒██████▓▒███████▓▒░ ░▒▓▓▓░ ░▒░ ▒██████████████\n" + "██████████████▒▓▒▓▓▓▓▓▒▓█▓▓█▓▒▓▒▒▒▓▓▓▓▒▒▒▓▓▒░▒▒▒▒ ▒███████████████\n" + "██████████████░▒▓▒░░░░ ▒▓▓▓▓▒░░▒██▓░▒▓▓▓░ ░▒▒▒░ ▒████████████████\n" + "██████████████▓ ▒▒ ░▒▒▒▓▓▓▒▒▒░▒▓▒▒████▓▓░▒░░ ░░░ ▓████████████████\n" + "███████████████▒ ▓▓▓▓▓▒▒▒▓▓▓▒▓▒░▒▓▓▒▒▒░▒▓▒▒▒▒▒ ▓████████████████\n" + "████████████████▓▒▓▒▓▓▓▓▒▒ ▒█▓▓▒░ ░▒▒▒▒░░░ ▒███████████████████\n" + "████████████████▓▒ ▒▒▒▓▒▒▒ ▒▓▓▓▒▒▒▒▒▒▒▒░ ▒███████████████████\n" + "█████████████████▓▓▒▓▓▓▒▒▓▒▒░ ░░▒░░░░░ ▒▓▓█████████████████████\n" + "███████████████████▒▓▓▓▒▒▒▒░▒░░░▒▓▒▒▒▒░░▒ ▓███████████████████████\n" + "██████████████████▓░░▒▒░ ░ ▒░░▒▒▓▒░▒▓ ░▒ ███████████████████████\n" + "██████████████████ ░█▓▓▓▓▓▓▒▒ ░░░░▒ ▒▒ ░░ ▓█████████████████████\n" + "████████████████▒▒░░▓▒▓▒▒▒▒▒▒▒░ ▓▓▓▓░▓▒░▓░░ █████████████████████\n" + "███████████████▓ ▒▓▓░░▒▒▒░░ ▓▒▒█░▒░ ▒░▒▓▒░░ ▓███████████████████\n" + "███████████████░ ░▓▒▓▒░░░ ▓▒▒▒░▒▒ ▒▒░▒▒░░░ ▓██████████████████\n" + "█████████████▓░ ░▓▒▒█▓▓▓▒▒▒░░▓▓░▓ ░▒░░▒░░▒▒░░░ ▓█████████████████\n" + "█████████████▓░▓▓▓▒▓▓░▓▓▒▒▒░░▓▓ ▓▒▓▒░░░░ ▒▒▒░░ ▓████████████████\n" + "█████████████▓▓▓▓▒▒▓█▓▓▒▒▒▒▒░▒▓ ▓▒▓▒░ ░▓▒▒░▒░ ████████████████\n" + "████████████▓▓▓▓▒▒▒██▓▓▒▒░░▒░▒█ ▓▒▒▒░░ ▒▒▒▒░░░ ███████████████\n" + "████████████▒▒▓▓░▒▓█▓▓▓▒▒░░░ ░█░▒▒▒▒░░▒░░ ▓▒▒ ░░░ ▒██████████████\n" + "███████████▓▒▒▒▓ ▓▓█░▒▓▓▒▒░ █▒▒▓░▒▒░░░░░ ▒▒▒▒░ ░ ██████████████\n" + "███████████▒░▒▓ ▓▓██▓▓▓▒░ ▓▓░▓▒▒▒░░░░░ ░▒░▒▒░ ▓█████████████\n" + "██████████▓░▒▓░▓▓▒▓███▓▓▒░░ ▒█ ░▓▓▒░░░░ ░▒██▓▒▒▒ ██████████████\n" + "██████████▒▒▒▒██▓ ▒▓█▓▓▓▒░ ░▒██░ ▒▒▒ ░▒▓█████▒ ▒██████████████\n" + "██████████▒▒▓█████▒░▓▓▓▒░▒▒▓▓▓░ ▓█▓▒▒░▒▓███████████████████████\n" + "███████████████████▓░▒▒▒▓▓▓▒▓░ ░▓██████▓█████████████████████████\n" + "█████████████████████▓░▒▒▒▓███████████████████████████████████████\n" + "██████████████████████████████████████████████████████████████████" + "\nSem orgulho algum por tudo o que estes apetrechos viram juntamente com seus olhos voce os equipa.\n" + "já sob o velho manto e armadura, com seu antigo arma no suporte em suas costas\n" + "o unico desejo ecoa por todo seu corpo… VINGANÇA!"); } System.out.println(""); Som.finalMario(); System.out.println("PARABENS! 
voce acaba de concluir a primeira fase do jogo!.\n" + "1-INICIAR ATO 2 2-IR PARA O MENU INICIAL 3-VERIFICAR STATUS"); resp = e.nextInt(); while (resp != 1 || resp != 2) { if (resp == 1) { Ato2 c = new Ato2(this.jogador); c.Enredo_2(); break; } if (resp == 2) { System.out.println("MOSTRAR STATUS"); break; } System.out.println("Numero invalido, tente novamente\n" + "1-INICIAR ATO 2 2-IR PARA O MENU INICIAL 3-VERIFICAR STATUS"); resp = e.nextInt(); } } public void inicializarJogador() { Especialidade arqueiro = new Arqueiro(); jogadorTeste = new Humano("Vagner", arqueiro); configuraDispensa(jogadorTeste.getClasseJogador()); } public void configuraDispensa(Especialidade classe) { dispensa = new Inventario(30); dispensa.adicionarItem(Itens.getCAMISA()); dispensa.adicionarItem(Itens.getJABUTICABA()); switch (classe.getDescricao()) { case "Guerreiro": dispensa.adicionarItem(Itens.ESPADA); break; case "Mago": dispensa.adicionarItem(Itens.CAJADO); break; case "Arqueiro": dispensa.adicionarItem(Itens.ARCO); break; default: break; } setDispensa(dispensa); } public Inventario javaliLeste(Especialidade classe) { Inventario recompensa = new Inventario(2); ItemAtaque item; if (classe.getDescricao().equals("Guerreiro")) { item = (ItemAtaque) Itens.ESPADAJUSTICEIRA; } else if (classe.getDescricao().equals("Mago")) { item = (ItemAtaque) Itens.CAJADOESMERALDA; } else if (classe.getDescricao().equals("Arqueiro")) { item = (ItemAtaque) Itens.ARCOANTIGO; } else { return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", uma coxinha " + (Itens.COXINHA) + " e uma Camisa Longa" + (Itens.CAMISALONGA)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.COXINHA); recompensa.adicionarItem(Itens.CAMISALONGA); return recompensa; } public Inventario cervoNorte(Especialidade classe) { Inventario recompensa = new Inventario(2); ItemAtaque item; if (classe.getDescricao().equals("Guerreiro")) { item = (ItemAtaque) Itens.ESPADAJUSTICEIRA; } else if (classe.getDescricao().equals("Mago")) { item = (ItemAtaque) Itens.CAJADOESMERALDA; } else if (classe.getDescricao().equals("Arqueiro")) { item = (ItemAtaque) Itens.ARCOANTIGO; } else { return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", uma coxinha " + (Itens.COXINHA) + " e uma Camisa Longa" + (Itens.CAMISALONGA)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.COXINHA); recompensa.adicionarItem(Itens.CAMISALONGA); return recompensa; } public Inventario globinFase2(Especialidade classe) { Inventario recompensa = new Inventario(3); ItemAtaque item; if (classe.getDescricao().equals("Guerreiro")) { item = (ItemAtaque) Itens.ESPADAMAGICA; } else if (classe.getDescricao().equals("Mago")) { item = (ItemAtaque) Itens.CAJADODEPAUS; } else if (classe.getDescricao().equals("Arqueiro")) { item = (ItemAtaque) Itens.ARCOARBALEST; } else { return recompensa; } item.getInformacoes(); System.out.println("Parabéns, você encontrou uma " + item.getInformacoes() + ", um espinafre " + (Itens.ESPINAFRE) + " e uma Armadura Azul" + (Itens.ARMADURAAZUL)); recompensa.adicionarItem(item); recompensa.adicionarItem(Itens.ESPINAFRE); recompensa.adicionarItem(Itens.ARMADURAAZUL); return recompensa; } public Inventario getDispensa() { return dispensa; } public void setDispensa(Inventario dispensa) { this.dispensa = dispensa; } private void abrirInventario(Inventario inventario, String descricao) { descricao.toUpperCase(); 
System.out.println("------------------"); System.out.println("---" + descricao + "---"); System.out.println("------------------"); for (int i = 0; i < inventario.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + inventario.verItem(i).getDescricao()); } System.out.println("------------------"); System.out.println("Selecione uma ação: "); System.out.println("0 - Fechar"); System.out.println("1 - Guardar na mochila"); System.out.println("2 - Comer alimento"); System.out.println("3 - Mover para a mão"); Scanner selecionar = new Scanner(System.in); int acao = selecionar.nextInt(); if (acao == 1) { coletaDeItens(inventario); } else if (acao == 2) { System.out.println("Digite o número do alimento que deseja comer: "); Scanner alimento = new Scanner(System.in); int numAlimento = alimento.nextInt(); Item comida = inventario.pegarItem(numAlimento); try { jogadorTeste.comer((Comida) comida); System.out.println("Yummmm..."); System.out.println("Sua vida atual é: " + jogadorTeste.getbVidaAtual() + "/" + jogadorTeste.getbVida()); abrirInventario(inventario, descricao); } catch (Exception e) { System.out.println("Não foi possível comer o item."); inventario.adicionarItem(comida); abrirInventario(inventario, descricao); } } else if (acao == 3) { System.out.println("Digite o número do item que deseja mover para a mão: "); Scanner itemDeAtaque = new Scanner(System.in); int posicao = itemDeAtaque.nextInt(); System.out.println("Você tem certeza que deseja descartar o item atual para pegar o item " + inventario.verItem(posicao).getDescricao() + "(1 - Sim / 2 - Não)"); Scanner confirma = new Scanner(System.in); int resposta = confirma.nextInt(); if (resposta == 1) { try { Item itemAtaque = inventario.pegarItem(posicao); jogadorTeste.setItemDaMao((ItemDeCombate) itemAtaque); } catch (Exception e) { System.out.println("Erro ao pegar item. 
Verifique se é um item de ataque."); } } } } private void abrirMochila(Inventario inventario, String descricao) { System.out.println("------------------"); System.out.println("---" + descricao.toUpperCase() + "---"); System.out.println("------------------"); for (int i = 0; i < inventario.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + inventario.verItem(i).getDescricao()); } System.out.println("------------------"); System.out.println("Selecione uma ação: "); System.out.println("0 - Fechar"); System.out.println("1 - Comer item"); Scanner selecao = new Scanner(System.in); int acao1 = selecao.nextInt(); if (acao1 == 1) { System.out.println("Digite o número do alimento que deseja comer: "); Scanner alimento = new Scanner(System.in); int numAlimento = alimento.nextInt(); Item comida = inventario.pegarItem(numAlimento); try { jogadorTeste.comer((Comida) comida); System.out.println("Yummmm..."); System.out.println("Sua vida atual é: " + jogadorTeste.getbVidaAtual() + "/" + jogadorTeste.getbVida()); abrirMochila(inventario, descricao); } catch (Exception e) { System.out.println("Não foi possível comer o item."); inventario.adicionarItem(comida); abrirMochila(inventario, descricao); } } } private void coletaDeItens(Inventario dispensa) { int acao = 1; while (acao != 0) { for (int i = 0; i < dispensa.verTodosItens().size(); i++) { System.out.println("[" + i + "] - " + dispensa.verItem(i).getDescricao()); } System.out.println(" -- Digite o número do item que você deseja pegar:"); Scanner selecionar = new Scanner(System.in); int pegar = selecionar.nextInt(); try { Item itemPego = dispensa.pegarItem(pegar); jogadorTeste.adicionarItem(itemPego); System.out.println("Item adicioando à mochila!"); } catch (Exception e) { System.out.println("Esse item não pode ser pego!"); System.out.println("Motivo: " + e.getMessage()); } System.out.println("Deseja pegar outro item (1) ou fechar a mochila (0)?"); acao = selecionar.nextInt(); } } }
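The commit message recorded just below states that the three nearly identical reward methods above (javaliLeste, cervoNorte, globinFase2) are being merged into one. As a purely hypothetical sketch of such a consolidation, reusing only types and calls already visible in this file (Especialidade.getDescricao(), Inventario, ItemAtaque, adicionarItem) and with invented parameter names, the shared logic could be factored roughly like this; it is not the repository's actual refactoring:

public Inventario loot(Especialidade classe, Item armaGuerreiro, Item armaMago,
        Item armaArqueiro, Item comida, Item vestimenta) {
    // Illustrative sketch only: one reward method parameterized by the
    // class-specific weapon and the bonus items.
    Inventario recompensa = new Inventario(3);
    ItemAtaque arma;
    switch (classe.getDescricao()) {
        case "Guerreiro": arma = (ItemAtaque) armaGuerreiro; break;
        case "Mago":      arma = (ItemAtaque) armaMago;      break;
        case "Arqueiro":  arma = (ItemAtaque) armaArqueiro;  break;
        default:          return recompensa; // unknown class: empty reward, as in the originals
    }
    System.out.println("Parabéns, você encontrou uma " + arma.getInformacoes()
            + ", " + comida + " e " + vestimenta);
    recompensa.adicionarItem(arma);
    recompensa.adicionarItem(comida);
    recompensa.adicionarItem(vestimenta);
    return recompensa;
}

Each existing call site would then pass its own drops, e.g. loot(classe, Itens.ESPADAJUSTICEIRA, Itens.CAJADOESMERALDA, Itens.ARCOANTIGO, Itens.COXINHA, Itens.CAMISALONGA) for javaliLeste.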
Merging all the loot methods into a single main one.
src/atos/Ato1.java
Merging all the loot methods into a single main one.
Java
mit
54d44f552837430c1113d4abcf2a61d20a5cf521
0
divayprakash/isprime
package io.github.divayprakash.isprime; import android.content.Intent; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.MenuItem; import android.view.View; import android.widget.TextView; /** * The HintActivity class implements the control logic for the hint screen * displayed when the user presses the "Cheat" button. * * @author Divay Prakash */ public class CheatActivity extends AppCompatActivity { private TextView cheatTitle; private TextView cheatDisplay; private int RANDOM_NUMBER; private boolean IS_CHEAT_TAKEN = false; /** * This method is called at the startup of the application. It sets the * view to the XML file associated with this Activity. The method also * enables the back button on the action bar. * @param savedInstanceState The saved instance state of the application. */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_cheat); Intent intent = getIntent(); RANDOM_NUMBER = intent.getIntExtra("RandomNumber", 2); TextView cheatTitle = (TextView)findViewById(R.id.cheatTitle); TextView cheatDisplay = (TextView)findViewById(R.id.cheatDisplay); } private void setIntentValues(){ Intent intent = getIntent(); if (IS_CHEAT_TAKEN) { setResult(RESULT_OK, intent); } else { setResult(RESULT_CANCELED, intent); } finish(); } public void onShowCheat(View view) { IS_CHEAT_TAKEN = true; cheatTitle.setVisibility(View.VISIBLE); boolean IS_PRIME = isPrime(); if (IS_PRIME) { cheatDisplay.setText("TRUE"); } else { cheatDisplay.setText("FALSE"); } cheatDisplay.setVisibility(View.VISIBLE); } private boolean isPrime() { for (int Divisor = 2; Divisor < RANDOM_NUMBER / 2; Divisor++) { if (RANDOM_NUMBER % Divisor == 0) return false; } return true; } /** * This method is called when the back button on the action bar is pressed. * It calls finish() to end this activity and return to the MainActivity. * @param item The menu item instance passed to this method. * @return true */ @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home) { setIntentValues(); return true; } return super.onOptionsItemSelected(item); } @Override public void onBackPressed() { setIntentValues(); } }
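One detail worth flagging in the primality check above: the loop runs Divisor from 2 only while Divisor < RANDOM_NUMBER / 2, so a composite whose sole nontrivial divisor equals RANDOM_NUMBER / 2 (for example 4) is reported as prime. (The local TextView declarations in onCreate also shadow the cheatTitle/cheatDisplay fields that onShowCheat later dereferences.) A standalone illustrative replacement for the check, not part of either file version in this record, bounds the trial division by the square root instead:

// Illustrative only: trial division up to sqrt(n); rejects 4 as well as 0, 1 and negatives.
static boolean isPrime(int n) {
    if (n < 2) return false;
    for (int divisor = 2; (long) divisor * divisor <= n; divisor++) {
        if (n % divisor == 0) return false;
    }
    return true;
}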
app/src/main/java/io/github/divayprakash/isprime/CheatActivity.java
package io.github.divayprakash.isprime; import android.content.Intent; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.MenuItem; import android.view.View; import android.widget.TextView; /** * The HintActivity class implements the control logic for the hint screen * displayed when the user presses the "Cheat" button. * * @author Divay Prakash */ public class CheatActivity extends AppCompatActivity { private TextView cheatTitle; private TextView cheatDisplay; private int RANDOM_NUMBER; private boolean IS_CHEAT_TAKEN = false; /** * This method is called at the startup of the application. It sets the * view to the XML file associated with this Activity. The method also * enables the back button on the action bar. * @param savedInstanceState The saved instance state of the application. */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_cheat); Intent intent = getIntent(); RANDOM_NUMBER = intent.getIntExtra("RandomNumber", 2); TextView cheatTitle = (TextView)findViewById(R.id.cheatTitle); TextView cheatDisplay = (TextView)findViewById(R.id.cheatDisplay); } private void setIntentValues(){ Intent intent = getIntent(); intent.putExtra("IsCheatTaken", IS_CHEAT_TAKEN); setResult(RESULT_OK, intent); finish(); } public void onShowCheat(View view) { IS_CHEAT_TAKEN = true; cheatTitle.setVisibility(View.VISIBLE); boolean IS_PRIME = isPrime(); if (IS_PRIME) { cheatDisplay.setText("TRUE"); } else { cheatDisplay.setText("FALSE"); } cheatDisplay.setVisibility(View.VISIBLE); } private boolean isPrime() { for (int Divisor = 2; Divisor < RANDOM_NUMBER / 2; Divisor++) { if (RANDOM_NUMBER % Divisor == 0) return false; } return true; } /** * This method is called when the back button on the action bar is pressed. * It calls finish() to end this activity and return to the MainActivity. * @param item The menu item instance passed to this method. * @return true */ @Override public boolean onOptionsItemSelected(MenuItem item) { if (item.getItemId() == android.R.id.home) { setIntentValues(); return true; } return super.onOptionsItemSelected(item); } @Override public void onBackPressed() { setIntentValues(); } }
CheatActivity and activity_cheat in progress
app/src/main/java/io/github/divayprakash/isprime/CheatActivity.java
CheatActivity and activity_cheat in progress
Java
epl-1.0
8af1493d437e564fa1aa54dbecdd4c0304931087
0
akurtakov/Pydev,RandallDW/Aruba_plugin,fabioz/Pydev,RandallDW/Aruba_plugin,fabioz/Pydev,rajul/Pydev,rajul/Pydev,RandallDW/Aruba_plugin,aptana/Pydev,rajul/Pydev,rajul/Pydev,akurtakov/Pydev,akurtakov/Pydev,fabioz/Pydev,fabioz/Pydev,akurtakov/Pydev,rgom/Pydev,rgom/Pydev,RandallDW/Aruba_plugin,RandallDW/Aruba_plugin,rgom/Pydev,fabioz/Pydev,rajul/Pydev,aptana/Pydev,RandallDW/Aruba_plugin,fabioz/Pydev,rgom/Pydev,akurtakov/Pydev,rgom/Pydev,aptana/Pydev,rajul/Pydev,rgom/Pydev,akurtakov/Pydev
/* * Author: atotic * Created: Jul 25, 2003 * License: Common Public License v1.0 */ package org.python.pydev.parser; import java.io.StringReader; import java.util.ArrayList; import java.util.Iterator; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.jface.text.Assert; import org.eclipse.jface.text.DocumentEvent; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IDocumentListener; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IFileEditorInput; import org.python.parser.ParseException; import org.python.parser.PythonGrammar; import org.python.parser.ReaderCharStream; import org.python.parser.SimpleNode; import org.python.parser.TokenMgrError; import org.python.pydev.editor.PyEdit; /** * PyParser uses org.python.parser to parse the document * (lexical analysis) * It is attached to PyEdit (a view), and it listens to document changes * On every document change, the syntax tree is regenerated * The reparsing of the document is done on a ParsingThread * * Clients that need to know when new parse tree has been generated * should register as parseListeners. */ public class PyParser { IDocument document; PyEdit editorView; IDocumentListener documentListener; // listens to changes in the document ArrayList parserListeners; // listeners that get notified SimpleNode root; // root of the last PythonGrammar analysis static final boolean parseOnThread = true; // can turn of thread parsing for debugging ParsingThread parsingThread; // thread that reparses the document public PyParser(PyEdit editorView) { this.editorView = editorView; root = null; parserListeners = new ArrayList(); parsingThread = new ParsingThread(this); parsingThread.setName("Parsing thread"); } public void dispose() { // remove the listeners if (document != null) document.removeDocumentListener(documentListener); parsingThread.diePlease(); } public void setDocument(IDocument document) { // Cleans up old listeners if (this.document != null) { document.removeDocumentListener(documentListener); } // Set up new listener this.document = document; if (document == null) { System.err.println("No document in PyParser::setDocument?"); return; } documentListener = new IDocumentListener() { public void documentChanged(DocumentEvent event) { if (parseOnThread == true) parsingThread.documentChanged(); else reparseDocument(); } public void documentAboutToBeChanged(DocumentEvent event) {} }; document.addDocumentListener(documentListener); // Reparse document on the initial set parsingThread.start(); if (parseOnThread == true) parsingThread.documentChanged(); else reparseDocument(); } public SimpleNode getRoot() { return root; } /** stock listener implementation */ public void addParseListener(IParserListener listener) { Assert.isNotNull(listener); if (! 
parserListeners.contains(listener)) parserListeners.add(listener); } /** stock listener implementation */ public void removeParseListener(IParserListener listener) { Assert.isNotNull(listener); parserListeners.remove(listener); } /** * stock listener implementation * event is fired whenever we get a new root */ protected void fireParserChanged() { if (parserListeners.size() > 0) { ArrayList list= new ArrayList(parserListeners); Iterator e= list.iterator(); while (e.hasNext()) { IParserListener l= (IParserListener) e.next(); l.parserChanged(root); } } } /** * stock listener implementation * event is fired when parse fails */ protected void fireParserError(Throwable error) { if (parserListeners.size() > 0) { ArrayList list= new ArrayList(parserListeners); Iterator e= list.iterator(); while (e.hasNext()) { IParserListener l= (IParserListener) e.next(); l.parserError(error); } } } /** * Parses the document, generates error annotations */ void reparseDocument() { StringReader inString = new StringReader(document.get()); ReaderCharStream in = new ReaderCharStream(inString); PythonGrammar grammar = new PythonGrammar(in, new CompilerAPI()); IEditorInput input = editorView.getEditorInput(); if (input == null) return; IFile original= (input instanceof IFileEditorInput) ? ((IFileEditorInput) input).getFile() : null; try { SimpleNode newRoot = grammar.file_input(); // parses the file root = newRoot; original.deleteMarkers(IMarker.PROBLEM, false, 1); fireParserChanged(); } catch (ParseException parseErr) { fireParserError(parseErr); } catch (TokenMgrError tokenErr) { fireParserError(tokenErr); }catch (Exception e) { System.err.println("Unexpected parse error"); e.printStackTrace(); } } } /** * Utility thread that reparses document on regular intervals * it waits for document to get changed * after each reparse, thread waits a bit to avoid flicker */ class ParsingThread extends Thread { PyParser parser; boolean docChanged = false; boolean stayingAlive = true; ParsingThread(PyParser parser) { this.parser = parser; } public synchronized void waitForChange() throws InterruptedException { if (docChanged == false) wait(); docChanged = false; } public synchronized void documentChanged() { docChanged = true; notify(); } public synchronized void diePlease() { stayingAlive = false; notify(); } public void run() { // wait for document change, and reparse try { while (stayingAlive) { waitForChange(); sleep(2000); // sleep a bit, to avoid flicker synchronized(this) { docChanged = false; } if (stayingAlive == true) { // could have been woken up by diePlease() parser.reparseDocument(); } } } catch (InterruptedException e) { return; } } }
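The class comment in PyParser above says that clients wanting to know when a new parse tree is available should register as parse listeners. A minimal hypothetical client is sketched below; the IParserListener method signatures are inferred from the fireParserChanged()/fireParserError() calls visible above, and the assumption is that the interface declares exactly these two callbacks and lives in the same package as PyParser (it is used there without an import):

// Hypothetical sketch of a parse listener client; signatures inferred, not verified
// against the real IParserListener interface.
import org.python.parser.SimpleNode;
import org.python.pydev.parser.IParserListener;
import org.python.pydev.parser.PyParser;

public class OutlineRefresher implements IParserListener {
    public void parserChanged(SimpleNode root) {
        // a fresh AST is available; refresh whatever view depends on it
        System.out.println("new parse tree: " + root);
    }

    public void parserError(Throwable error) {
        // parsing failed; report the problem instead of updating the view
        System.err.println("parse failed: " + error);
    }
}

// registration, e.g. right after the parser is created:
// parser.addParseListener(new OutlineRefresher());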
org.python.pydev/src/org/python/pydev/parser/PyParser.java
/* * Author: atotic * Created: Jul 25, 2003 * License: Common Public License v1.0 */ package org.python.pydev.parser; import java.io.StringReader; import java.util.ArrayList; import java.util.Iterator; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.jface.text.Assert; import org.eclipse.jface.text.DocumentEvent; import org.eclipse.jface.text.IDocument; import org.eclipse.jface.text.IDocumentListener; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IFileEditorInput; import org.python.parser.ParseException; import org.python.parser.PythonGrammar; import org.python.parser.ReaderCharStream; import org.python.parser.SimpleNode; import org.python.parser.TokenMgrError; import org.python.pydev.editor.PyEdit; /** * PyParser uses org.python.parser to parse the document * (lexical analysis) * It is attached to PyEdit (a view), and it listens to document changes * On every document change, the syntax tree is regenerated * The reparsing of the document is done on a ParsingThread * * Clients that need to know when new parse tree has been generated * should register as parseListeners. */ public class PyParser { IDocument document; PyEdit editorView; IDocumentListener documentListener; // listens to changes in the document ArrayList parserListeners; // listeners that get notified SimpleNode root; // root of the last PythonGrammar analysis static final boolean parseOnThread = true; // can turn of thread parsing for debugging ParsingThread parsingThread; // thread that reparses the document public PyParser(PyEdit editorView) { this.editorView = editorView; root = null; parserListeners = new ArrayList(); parsingThread = new ParsingThread(this); parsingThread.setName("Parsing thread"); } public void dispose() { // remove the listeners document.removeDocumentListener(documentListener); parsingThread.diePlease(); } public void setDocument(IDocument document) { // Cleans up old listeners if (this.document != null) { document.removeDocumentListener(documentListener); } // Set up new listener this.document = document; documentListener = new IDocumentListener() { public void documentChanged(DocumentEvent event) { if (parseOnThread == true) parsingThread.documentChanged(); else reparseDocument(); } public void documentAboutToBeChanged(DocumentEvent event) {} }; document.addDocumentListener(documentListener); // Reparse document on the initial set parsingThread.start(); if (parseOnThread == true) parsingThread.documentChanged(); else reparseDocument(); } public SimpleNode getRoot() { return root; } /** stock listener implementation */ public void addParseListener(IParserListener listener) { Assert.isNotNull(listener); if (! 
parserListeners.contains(listener)) parserListeners.add(listener); } /** stock listener implementation */ public void removeParseListener(IParserListener listener) { Assert.isNotNull(listener); parserListeners.remove(listener); } /** * stock listener implementation * event is fired whenever we get a new root */ protected void fireParserChanged() { if (parserListeners.size() > 0) { ArrayList list= new ArrayList(parserListeners); Iterator e= list.iterator(); while (e.hasNext()) { IParserListener l= (IParserListener) e.next(); l.parserChanged(root); } } } /** * stock listener implementation * event is fired when parse fails */ protected void fireParserError(Throwable error) { if (parserListeners.size() > 0) { ArrayList list= new ArrayList(parserListeners); Iterator e= list.iterator(); while (e.hasNext()) { IParserListener l= (IParserListener) e.next(); l.parserError(error); } } } /** * Parses the document, generates error annotations */ void reparseDocument() { StringReader inString = new StringReader(document.get()); ReaderCharStream in = new ReaderCharStream(inString); PythonGrammar grammar = new PythonGrammar(in, new CompilerAPI()); IEditorInput input = editorView.getEditorInput(); if (input == null) return; IFile original= (input instanceof IFileEditorInput) ? ((IFileEditorInput) input).getFile() : null; try { SimpleNode newRoot = grammar.file_input(); // parses the file root = newRoot; original.deleteMarkers(IMarker.PROBLEM, false, 1); fireParserChanged(); } catch (ParseException parseErr) { fireParserError(parseErr); } catch (TokenMgrError tokenErr) { fireParserError(tokenErr); }catch (Exception e) { System.err.println("Unexpected parse error"); e.printStackTrace(); } } } /** * Utility thread that reparses document on regular intervals * it waits for document to get changed * after each reparse, thread waits a bit to avoid flicker */ class ParsingThread extends Thread { PyParser parser; boolean docChanged = false; boolean stayingAlive = true; ParsingThread(PyParser parser) { this.parser = parser; } public synchronized void waitForChange() throws InterruptedException { if (docChanged == false) wait(); docChanged = false; } public synchronized void documentChanged() { docChanged = true; notify(); } public synchronized void diePlease() { stayingAlive = false; notify(); } public void run() { // wait for document change, and reparse try { while (stayingAlive) { waitForChange(); sleep(2000); // sleep a bit, to avoid flicker synchronized(this) { docChanged = false; } if (stayingAlive == true) { // could have been woken up by diePlease() parser.reparseDocument(); } } } catch (InterruptedException e) { return; } } }
Deal with "Open External File" weirdness more gracefully. Still an error but not an uncaught exception. git-svn-id: cdbd3c3453b226d8644b39c93ea790e37ea3ca1b@13 7f4d9e04-a92a-ab41-bea9-970b690ef4a7
org.python.pydev/src/org/python/pydev/parser/PyParser.java
Deal with "Open External File" weirdness more gracefully. Still an error but not an uncaught exception.
Java
epl-1.0
3079ce856cd78540fc349be6d082a8b08e2a4020
0
CyberdyneOfCerrado/Backups
package bancoDeDados; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import objetos.Backup; import objetos.RegraBackup; public class ReflexaoSql { public ReflexaoSql(){ }; public String gerarInsert( Object ob ){ Class<? extends Object> classe = ob.getClass(); String sql = "INSERT INTO " + classe.getSimpleName().toUpperCase() +" ("; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ Type type = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !type.toString().contains("historico") && !type.toString().contains("Regra") ) sql += temp.getName().toUpperCase() +","; } sql = sql.substring(0,sql.length()-1) +") VALUES ("; for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra")){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") ){ sql += "'"+temp.get(ob) +"',"; } else{ sql += ""+temp.get(ob) +","; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); return null; } } sql = sql.substring(0,sql.length()-1)+")"; return sql; }; public String gerarInsertComFk(Object ob , int fk ){ Class<? extends Object> classe = ob.getClass(); String sql = "INSERT INTO " + classe.getSimpleName().toUpperCase() +" ("; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ Type type = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !type.toString().contains("historico") && !type.toString().contains("Regra") ){ sql += temp.getName().toUpperCase() +","; } } sql += "FK) VALUES ("; for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra") ){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") || type.contains("Status") ){ sql += "'"+temp.get(ob) +"',"; } else{ sql += ""+temp.get(ob) +","; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); return null; } } return sql += fk +")"; }; public String recuperarId( Object ob ){ Class<? 
extends Object> classe = ob.getClass(); String sql = "SELECT PRIMARYKEY FROM " + classe.getSimpleName().toUpperCase() +" WHERE ( "; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra") ){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") || type.contains("Status")){ sql += temp.getName().toUpperCase() +" = '"+ temp.get(ob)+"' AND "; } else { sql += temp.getName().toUpperCase() +" = "+ temp.get(ob)+" AND "; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); } } return sql.substring(0,sql.length()-4)+")"; }; public ArrayList<Object> resgataObjeto ( ResultSet result ){ ArrayList<Object> arl = new ArrayList<>(); try { while (result.next()) { switch(result.getMetaData().getTableName(1)){ case"BACKUP": System.err.println("BACKUP"); arl.add( new Backup(result.getInt(1),result.getString(2),result.getString(3))); break; case"REGRABACKUP": System.err.println("REGRABACKUP"); arl.add( new RegraBackup(result.getInt(1), result.getString(2), result.getString(3))); break; case"VERSAO": System.err.println("VERSAO"); break; case"ARTEFATO": System.err.println("ARTEFATO"); break; case"DIAS": System.err.println("DIAS"); break; } } } catch (SQLException e) { e.printStackTrace(); }; return arl; }; }
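To make the reflection-driven string building above concrete, here is a hypothetical, self-contained illustration; the POJO, its values, and its name are invented, and only ReflexaoSql comes from the file above. Field order comes from Class.getDeclaredFields(), which in practice follows declaration order but is not guaranteed by the JLS:

// Hypothetical example only; assumes the ReflexaoSql class above is on the classpath.
package bancoDeDados;

public class ExemploReflexao {
    public int primaryKey = 1;      // skipped: gerarInsert ignores the field named "primaryKey"
    public String nome = "diario";  // quoted: its declared type contains "String"
    public int intervalo = 7;       // numeric: emitted without quotes

    public static void main(String[] args) {
        // Expected output:
        // INSERT INTO EXEMPLOREFLEXAO (NOME,INTERVALO) VALUES ('diario',7)
        System.out.println(new ReflexaoSql().gerarInsert(new ExemploReflexao()));
    }
}

Since values are concatenated directly into the SQL text, any value containing a quote would break the statement; a PreparedStatement-based variant would avoid that, though that is outside the scope of this commit.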
src/bancoDeDados/ReflexaoSql.java
package bancoDeDados; import java.lang.reflect.Field; import java.lang.reflect.Type; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import objetos.Backup; import objetos.RegraBackup; public class ReflexaoSql { public ReflexaoSql(){ }; public String gerarInsert( Object ob ){ Class<? extends Object> classe = ob.getClass(); String sql = "INSERT INTO " + classe.getSimpleName().toUpperCase() +" ("; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ Type type = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !type.toString().contains("historico") && !type.toString().contains("Regra") ) sql += temp.getName().toUpperCase() +","; } sql = sql.substring(0,sql.length()-1) +") VALUES ("; for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra")){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") ){ sql += "'"+temp.get(ob) +"',"; } else{ sql += ""+temp.get(ob) +","; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); return null; } } sql = sql.substring(0,sql.length()-1)+")"; return sql; }; public String gerarInsertComFk(Object ob , int fk ){ Class<? extends Object> classe = ob.getClass(); String sql = "INSERT INTO " + classe.getSimpleName().toUpperCase() +" ("; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ Type type = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !type.toString().contains("historico") && !type.toString().contains("Regra") ){ sql += temp.getName().toUpperCase() +","; } } sql += "FK) VALUES ("; for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra") ){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") || type.contains("Status") ){ sql += "'"+temp.get(ob) +"',"; } else{ sql += ""+temp.get(ob) +","; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); return null; } } return sql += fk +")"; }; public String recuperarId( Object ob ){ Class<? 
extends Object> classe = ob.getClass(); String sql = "SELECT PRIMARYKEY FROM " + classe.getSimpleName().toUpperCase() +" WHERE ( "; Field [] f = classe.getDeclaredFields(); for( Field temp : f ){ try { Type typef = temp.getGenericType(); if( !temp.getName().equals("primaryKey") && !typef.toString().contains("historico") && !typef.toString().contains("Regra") ){ String type = temp.getGenericType().toString(); if( type.contains("String") || type.contains("DiasSemana") || type.contains("boolean") || type.contains("Date") || type.contains("Status")){ sql += temp.getName().toUpperCase() +" = '"+ temp.get(ob)+"' AND "; } else { sql += temp.getName().toUpperCase() +" = "+ temp.get(ob)+" AND "; } } } catch (IllegalArgumentException | IllegalAccessException e){ e.printStackTrace(); } } return sql.substring(0,sql.length()-4)+")"; }; public ArrayList<Object> resgataObjeto ( ResultSet result ){ ArrayList<Object> arl = new ArrayList<>(); try { while (result.next()) { switch(result.getMetaData().getTableName(1)){ case"BACKUP": System.err.println("BACKUP"); arl.add( new Backup(result.getInt(1),result.getString(2),result.getString(3))); break; case"REGRABACKUP": System.err.println("REGRABACKUP"); arl.add( new RegraBackup(result.getInt(1), result.getString(2), result.getString(3))); break; case"VERSAO": System.err.println("VERSAO"); break; case"ARTEFATO": System.err.println("ARTEFATO"); break; case"DIAS": System.err.println("DIAS"); break; } } } catch (SQLException e) { e.printStackTrace(); }; return arl; }; }
Fixing indentation
src/bancoDeDados/ReflexaoSql.java
Fixing indentation
Java
agpl-3.0
bdcf580bd686e7ccb52eb0921ca857be6d197d6c
0
VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb
/* This file is part of VoltDB. * Copyright (C) 2019 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.compiler.statements; import java.util.regex.Matcher; import org.hsqldb_voltpatches.VoltXMLElement; import org.voltdb.catalog.CatalogMap; import org.voltdb.catalog.Database; import org.voltdb.catalog.Topic; import org.voltdb.compiler.DDLCompiler; import org.voltdb.compiler.DDLCompiler.DDLStatement; import org.voltdb.compiler.DDLCompiler.StatementProcessor; import org.voltdb.compiler.VoltCompiler.DdlProceduresToLoad; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.parser.SQLParser; public class DropTopic extends StatementProcessor { public DropTopic(DDLCompiler ddlCompiler) { super(ddlCompiler); } @Override protected boolean processStatement(DDLStatement ddlStatement, Database db, DdlProceduresToLoad whichProcs) throws VoltCompilerException { Matcher matcher = SQLParser.matchDropTopic(ddlStatement.statement); if (!matcher.matches()) { return false; } String name = matcher.group("name"); CatalogMap<Topic> topics = db.getTopics(); if (topics.get(name) == null) { if (matcher.group("ifExists") == null) { throw m_compiler.new VoltCompilerException( String.format("Topic name \"%s\" in DROP TOPIC statement does not exist.", name)); } } else { topics.delete(name); VoltXMLElement tableXML = m_schema.findChild("table", name.toUpperCase()); if (tableXML != null) { tableXML.attributes.remove("topicName"); } } return true; } }
src/frontend/org/voltdb/compiler/statements/DropTopic.java
/* This file is part of VoltDB. * Copyright (C) 2019 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.compiler.statements; import java.util.regex.Matcher; import org.voltdb.catalog.CatalogMap; import org.voltdb.catalog.Database; import org.voltdb.catalog.Table; import org.voltdb.catalog.Topic; import org.voltdb.compiler.DDLCompiler; import org.voltdb.compiler.DDLCompiler.DDLStatement; import org.voltdb.compiler.DDLCompiler.StatementProcessor; import org.voltdb.compiler.VoltCompiler.DdlProceduresToLoad; import org.voltdb.compiler.VoltCompiler.VoltCompilerException; import org.voltdb.parser.SQLParser; public class DropTopic extends StatementProcessor { public DropTopic(DDLCompiler ddlCompiler) { super(ddlCompiler); } @Override protected boolean processStatement(DDLStatement ddlStatement, Database db, DdlProceduresToLoad whichProcs) throws VoltCompilerException { Matcher matcher = SQLParser.matchDropTopic(ddlStatement.statement); if (!matcher.matches()) { return false; } String name = matcher.group("name"); CatalogMap<Topic> topics = db.getTopics(); if (topics.get(name) == null) { if (matcher.group("ifExists") == null) { throw m_compiler.new VoltCompilerException( String.format("Topic name \"%s\" in DROP TOPIC statement does not exist.", name)); } } else { topics.delete(name); Table t = db.getTables().get(name); if (t != null) { t.setTopicname(""); } } return true; } }
ENG-20097 remove topic reference in table when topic is dropped (#99)
src/frontend/org/voltdb/compiler/statements/DropTopic.java
ENG-20097 remove topic reference in table when topic is dropped (#99)
Java
lgpl-2.1
8f80648c7768e712529ebcf0dee511aaf286276b
0
vigna/Sux4J,vigna/Sux4J,vigna/Sux4J,vigna/Sux4J
package it.unimi.dsi.sux4j.util; /* * Sux4J: Succinct data structures for Java * * Copyright (C) 2010 Sebastiano Vigna * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 3 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, see <http://www.gnu.org/licenses/>. * */ import it.unimi.dsi.Util; import it.unimi.dsi.bits.BitVector; import it.unimi.dsi.bits.BitVectors; import it.unimi.dsi.bits.Fast; import it.unimi.dsi.bits.LongArrayBitVector; import it.unimi.dsi.bits.TransformationStrategies; import it.unimi.dsi.bits.TransformationStrategy; import it.unimi.dsi.fastutil.booleans.BooleanArrayList; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntIterator; import it.unimi.dsi.fastutil.ints.IntOpenHashSet; import it.unimi.dsi.fastutil.io.BinIO; import it.unimi.dsi.fastutil.longs.LongOpenHashSet; import it.unimi.dsi.fastutil.objects.AbstractObjectIterator; import it.unimi.dsi.fastutil.objects.AbstractObjectSet; import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectArrayList; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectIterator; import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet; import it.unimi.dsi.fastutil.objects.ObjectSet; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import it.unimi.dsi.io.FastBufferedReader; import it.unimi.dsi.io.LineIterator; import it.unimi.dsi.lang.MutableString; import it.unimi.dsi.logging.ProgressLogger; import it.unimi.dsi.sux4j.mph.Hashes; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.nio.charset.Charset; import java.util.Comparator; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.zip.GZIPInputStream; import org.apache.log4j.Logger; import com.martiansoftware.jsap.FlaggedOption; import com.martiansoftware.jsap.JSAP; import com.martiansoftware.jsap.JSAPException; import com.martiansoftware.jsap.JSAPResult; import com.martiansoftware.jsap.Parameter; import com.martiansoftware.jsap.SimpleJSAP; import com.martiansoftware.jsap.Switch; import com.martiansoftware.jsap.UnflaggedOption; import com.martiansoftware.jsap.stringparsers.ForNameStringParser; /** A z-fast trie, that is, a predecessor/successor data structure using low linear (in the number of keys) additional space and * answering to the query string * <var>x</var> in time |<var>x</var>|/<var>w</var> + log(max{|<var>x</var>|, |<var>x</var><sup>-</sup>|, |<var>x</var><sup>+</sup>|}) with high probability, * where <var>w</var> is the machine word size, and <var>x</var><sup>-</sup>/<var>x</var><sup>+</sup> are the predecessor/successor of <var>x</var> in the currently stored set, respectively. 
* * <p>In rough terms, the z-fast trie uses time |<var>x</var>|/<var>w</var> (which is optimal) to actually look at the string content, * and log(max{|<var>x</var>|, |<var>x</var><sup>-</sup>|, |<var>x</var><sup>+</sup>|}) to perform the search. This is known to be (essentially) optimal. * String lengths are up to {@link Integer#MAX_VALUE}, and not limited to be a constant multiple of <var>w</var> for the bounds to hold. * * <p>The linear overhead of a z-fast trie is very low. For <var>n</var> keys we allocate 2<var>n</var> &minus; 1 nodes containing six references and * two longs, plus a dictionary containing <var>n</var> &minus; 1 nodes (thus using around 2<var>n</var> references and 2<var>n</var> longs). * */ public class ZFastTrie<T> extends AbstractObjectSortedSet<T> implements Serializable { public static final long serialVersionUID = 1L; private static final Logger LOGGER = Util.getLogger( ZFastTrie.class ); private static final boolean ASSERTS = true; private static final boolean SHORT_SIGNATURES = false; private static final boolean DEBUG = true; private static final boolean DDEBUG = DEBUG; /** The number of elements in the trie. */ private int size; /** The root node. */ private transient Node root; /** The transformation strategy. */ private final TransformationStrategy<? super T> transform; /** A dictionary mapping handles to the corresponding internal nodes. */ public transient Map map; /** The head of the doubly linked list of leaves. */ private transient Leaf head; /** The tail of the doubly linked list of leaves. */ private transient Leaf tail; /** A linear-probing hash map that compares keys using signatures as a first try. */ public final static class Map { private static final long serialVersionUID = 1L; private static final int INITIAL_LENGTH = 64; /** The transformation strategy. */ private final TransformationStrategy<Object> transform; /** The node table. */ private InternalNode[] node; /** The signature of the handle of the corresponding entry {@link #node}. */ private long[] signature; /** An array parallel to {@link #node} specifying whether a signature is a duplicate. * If true, there are more copies of the signature along the search path. */ private boolean dup[]; /** The number of elements in the table. */ private int size; /** The number of slots in the table (always a power of two). */ private int length; /** {@link #length} &minus; 1. 
*/ private int mask; private void assertTable() { for( int i = signature.length; i-- != 0; ) if ( node[ i ] != null ) assert get( node[ i ].handle( transform ), true ) == node[ i ]; if ( size == 0 ) return; final IntOpenHashSet overallHashes = new IntOpenHashSet(); int start = 0; int first = -1; while( node[ start ] != null ) start = ( start + 1 ) & mask; // We are on an empty entry for( ;; ) { while( node[ start ] == null ) start = ( start + 1 ) & mask; // We are on a nonempty entry if ( first == -1 ) first = start; else if ( first == start ) break; int end = start; while( node[ end ] != null ) end = ( end + 1 ) & mask; // [start..end) is a maximal nonempty subsequence LongOpenHashSet signaturesSeen = new LongOpenHashSet(); IntOpenHashSet hashesSeen = new IntOpenHashSet(); for( int pos = end; pos != start; ) { pos = ( pos - 1 ) & mask; assert signaturesSeen.add( signature[ pos ] ) ^ dup[ pos ]; hashesSeen.add( hash( signature[ pos ] ) ); } // Hashes in each maximal nonempty subsequence must be disjoint for( IntIterator iterator = hashesSeen.iterator(); iterator.hasNext(); ) assert overallHashes.add( iterator.nextInt() ); start = end; } } public Map( final int size, TransformationStrategy<Object> transform ) { this.transform = transform; length = Math.max( INITIAL_LENGTH, 1 << Fast.ceilLog2( 1 + ( 3L * size / 2 ) ) ); mask = length - 1; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } public Map( TransformationStrategy<Object> transform ) { this.transform = transform; length = INITIAL_LENGTH; mask = length - 1; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } //public transient long probes = 0; //public transient long scans = 0; /** Generates a hash table position starting from the signature. * * @param s a signature. */ private int hash( final long s ) { return (int)( s ^ s >>> 32 ) & mask; } /** Find the position in the table of a given handle using signatures. * * <p>Note that this function just compares signatures (except for duplicates, which are * checked explicitly). Thus, it might return false positives. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return the position in the table where the specified handle can be found, or <code>null</code>. */ private int findPos( final long s, final BitVector v, final long handleLength ) { int pos = hash( s ); while( node[ pos ] != null ) { if ( signature[ pos ] == s && ( ! dup[ pos ] || handleLength == node[ pos ].handleLength() && v.longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) ) break; pos = ( pos + 1 ) & mask; } return pos; } /** Find the position in the table of a given handle using handles. * * <p>Note that this function compares handles. Thus, it always returns a correct value. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return the position in the table where the specified handle can be found, or <code>null</code>. 
*/ private int findExactPos( final long s, final BitVector v, final long handleLength ) { int pos = hash( s ); while( node[ pos ] != null ) { if ( signature[ pos ] == s && handleLength == node[ pos ].handleLength() && v.longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) break; pos = ( pos + 1 ) & mask; } return pos; } public void clear() { length = INITIAL_LENGTH; mask = length - 1; size = 0; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } public ObjectSet<LongArrayBitVector> keySet() { return new AbstractObjectSet<LongArrayBitVector>() { @Override public ObjectIterator<LongArrayBitVector> iterator() { return new AbstractObjectIterator<LongArrayBitVector>() { private int i = 0; private int pos = -1; @Override public boolean hasNext() { return i < size; } @Override public LongArrayBitVector next() { if ( ! hasNext() ) throw new NoSuchElementException(); while( node[ ++pos ] == null ); i++; return LongArrayBitVector.copy( node[ pos ].handle( transform ) ); } }; } @Override public boolean contains( Object o ) { BitVector v = (BitVector)o; return get( v, true ) != null; } @Override public int size() { return size; } }; } public ObjectSet<Node> values() { return new AbstractObjectSet<Node>() { @Override public ObjectIterator<Node> iterator() { return new AbstractObjectIterator<Node>() { private int i = 0; private int pos = -1; @Override public boolean hasNext() { return i < size; } @Override public Node next() { if ( ! hasNext() ) throw new NoSuchElementException(); while( node[ ++pos ] == null ); i++; return node[ pos ]; } }; } @Override public boolean contains( Object o ) { final Node node = (Node)o; return get( node.handle( transform ), true ) != null; } @Override public int size() { return size; } }; } /** Replaces an entry with a given node. * * @param newNode a node with a handle already existing in the table; the corresponding * node will be replaced. * @see #replace(long, InternalNode) */ public void replace( final InternalNode newNode ) { replace( newNode.handleHash( transform ), newNode ); } /** Replaces an entry with a given node. * * <p>Note that as long as the handle of <code>newNode</code> is actually in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of <code>newNode</code>. * @param newNode a node with a handle already appearing in the table; the corresponding * node will be replaced. */ public void replace( long s, final InternalNode newNode ) { if ( SHORT_SIGNATURES ) s &= 0x3; final int pos = findPos( s, newNode.reference.key( transform ), newNode.handleLength() ); if ( ASSERTS ) assert node[ pos ] != null; if ( ASSERTS ) assert node[ pos ].handle( transform ).equals( newNode.handle( transform ) ) : node[ pos ].handle( transform ) + " != " + newNode.handle( transform ); node[ pos ] = newNode; if ( ASSERTS ) assertTable(); } /** Removes an existing entry from the table. * * <p>Note that as long as the given handle is actually in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return true if some key was removed, but if the given handle was not in the table another handle * with the same signature might have been removed instead. 
*/ public boolean remove( long s, final InternalNode v, final long handleLength ) { if ( DEBUG ) System.err.println( "Map.remove(" + s + ", " + v + ", " + handleLength + ")" ); if ( SHORT_SIGNATURES ) s &= 0x3; final int hash = hash( s ); int pos = hash; int lastDup = -1; // Keeps track of the last duplicate entry with the same signature. while( node[ pos ] != null ) { if ( signature[ pos ] == s ) { if ( ! dup[ pos ] || handleLength == node[ pos ].handleLength() && v.reference.key( transform ).longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) break; else lastDup = pos; } pos = ( pos + 1 ) & mask; } // This should NOT happen, but let's return a sensible value anyway. if ( node[ pos ] == null ) return false; if ( ! dup[ pos ] && lastDup != -1 ) dup[ lastDup ] = false; // We are removing the only non-duplicate entry. // Move entries, compatibly with their hash code, to fill the hole. int candidateHole, h; do { candidateHole = pos; // Find candidate for a move (possibly empty). do { pos = ( pos + 1 ) & mask; if ( node[ pos ] == null ) break; h = hash( node[ pos ].handleHash( transform ) ); /* The hash h must lie cyclically between candidateHole and pos: more precisely, h must be after candidateHole * but before the first free entry in the table (which is equivalent to the previous statement). */ } while( candidateHole <= pos ? candidateHole < h && h <= pos : candidateHole < h || h <= pos ); node[ candidateHole ] = node[ pos ]; signature[ candidateHole ] = signature[ pos ]; dup[ candidateHole ] = dup[ pos ]; } while( node[ pos ] != null ); size--; if ( ASSERTS ) assertTable(); return true; } /** Adds a new entry to the table. * * @param v a node. * * @see #addNew(long, InternalNode) */ public void addNew( final InternalNode v ) { addNew( v.handleHash( transform ), v ); } /** Adds a new entry to the table. * * <p>Note that as long as the handle of the given node is not in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of the handle of <code>v</code>. * @param v a node. */ public void addNew( long s, final InternalNode v ) { if ( SHORT_SIGNATURES ) s &= 0x3; if ( DEBUG ) System.err.println( "Map.addNew(" + s + ", " + v + ")" ); int pos = hash( s ); // Finds a free position, marking all keys with the same signature along the search path as duplicates. while( node[ pos ] != null ) { if ( signature[ pos ] == s ) dup[ pos ] = true; pos = ( pos + 1 ) & mask; } if ( ASSERTS ) assert node[ pos ] == null; size++; signature[ pos ] = s; node[ pos ] = v; if ( 3L * size > 2L * length ) { // Rehash. length *= 2; mask = length - 1; final long newKey[] = new long[ length ]; final InternalNode[] newValue = new InternalNode[ length ]; final boolean[] newDup = new boolean[ length ]; final long[] key = this.signature; final InternalNode[] value = this.node; for( int i = key.length; i-- != 0; ) { if ( value[ i ] != null ) { s = key[ i ]; pos = hash( s ); while( newValue[ pos ] != null ) { if ( newKey[ pos ] == s ) newDup[ pos ] = true; pos = ( pos + 1 ) & mask; } newKey[ pos ] = key[ i ]; newValue[ pos ] = value[ i ]; } } this.signature = newKey; this.node = newValue; this.dup = newDup; } if ( ASSERTS ) assertTable(); } public int size() { return size; } /** Retrives a node given its handle. * * @param v a bit vector. * @param handleLength the prefix of <code>v</code> that must be used as a handle. * @param s the signature of the specified handle. 
* @param exact whether the search should be exact; if false, and the given handle does not * appear in the table, it is possible that a wrong node is returned. * @return the node with given handle, or <code>null</code> if there is no such node. */ public InternalNode get( final BitVector v, final long handleLength, final long s, final boolean exact ) { if ( SHORT_SIGNATURES ) { final int pos = exact ? findExactPos( s & 0x3, v, handleLength ) : findPos( s & 0x3, v, handleLength ); return node[ pos ]; } else { final int pos = exact ? findExactPos( s, v, handleLength ) : findPos( s, v, handleLength ); return node[ pos ]; } } /** Retrives a node given its handle. * * @param handle a handle. * @param exact whether the search should be exact; if false, and the given handle does not * appear in the table, it is possible that a wrong node is returned. * @return the node with given handle, or <code>null</code> if there is no such node. * @see #get(BitVector, long, long, boolean) */ public InternalNode get( final BitVector handle, final boolean exact ) { return get( handle, handle.length(), Hashes.murmur( handle, 0 ), exact ); } public String toString() { StringBuilder s = new StringBuilder(); s.append( '{' ); for( LongArrayBitVector v: keySet() ) s.append( v ).append( " => " ).append( get( v, false ) ).append( ", " ); if ( s.length() > 1 ) s.setLength( s.length() - 2 ); s.append( '}' ); return s.toString(); } } /** A node of the trie. */ protected abstract static class Node { /** The length of the extent of the parent node, or 0 for the root. */ protected long parentExtentLength; /** The length of the extent (for leaves, this is equal to the length of the transformed {@link #key}). */ protected long extentLength; public boolean isLeaf() { return this instanceof Leaf; } public boolean isInternal() { return this instanceof InternalNode; } public long nameLength() { return parentExtentLength == 0 ? 0 : parentExtentLength + 1; } public long jumpLength() { final long handleLength = handleLength(); return handleLength + ( handleLength & -handleLength ); } public long handleLength() { return twoFattest( parentExtentLength, extentLength ); } public abstract BitVector key( TransformationStrategy<Object> transform ); public abstract BitVector handle( TransformationStrategy<Object> transform ); public abstract BitVector extent( TransformationStrategy<Object> transform ); public abstract boolean intercepts( final long h ); public long handleHash( TransformationStrategy<Object> transform ) { if ( SHORT_SIGNATURES ) return Hashes.murmur( handle( transform ), 0 ) & 0x3; else return Hashes.murmur( handle( transform ), 0 ); } /** Returns true if this node is the exit node of a string. * * @param v the string. * @param transform the transformation strategy used to build the trie this node belongs to. * @return true if the string exits at this node. */ public boolean isExitNodeOf( final LongArrayBitVector v, TransformationStrategy<Object> transform ) { return isExitNodeOf( v.length(), v.longestCommonPrefixLength( extent( transform ) ) ); } /** Returns true if this node is the exit node of a string given its length and the length of the longest * common prefix with the node extent. * * @param length the length of a string. * @param lcpLength the length of the longest common prefix between the string and the extent of this node. * @return true if the string exits at this node. 
*/ public boolean isExitNodeOf( final long length, final long lcpLength ) { return parentExtentLength < lcpLength && ( lcpLength < extentLength || lcpLength == length ); } public String toString() { final TransformationStrategy transform = TransformationStrategies.prefixFreeIso(); return ( isLeaf() ? "[" : "(" ) + Integer.toHexString( hashCode() & 0xFFFF ) + ( key( transform ) == null ? "" : " " + ( extentLength > 16 ? key( transform ).subVector( 0, 8 ) + "..." + key( transform ).subVector( extentLength - 8, extentLength ): key( transform ).subVector( 0, extentLength ) ) ) + " (" + parentExtentLength + ".." + extentLength + "], " + handleLength() + "->" + jumpLength() + ( isLeaf() ? "]" : ")" ); } } /** A node of the trie. */ protected final static class InternalNode extends Node { /** The left subtree. */ protected Node left; /** The right subtree. */ protected Node right; /** The jump pointer for the left path for internal nodes; <code>null</code>, otherwise (this * makes leaves distinguishable). */ protected Node jumpLeft; /** The jump pointer for the right path for internal nodes; <code>null</code>, otherwise. */ protected Node jumpRight; /** The leaf whose key this node refers to for internal nodes; the internal node that * refers to the key of this leaf, otherwise. Will be <code>null</code> for exactly one leaf. */ protected Leaf reference; public boolean isLeaf() { return false; } public boolean isInternal() { return true; } public boolean intercepts( final long h ) { return h > parentExtentLength && h <= extentLength; } public BitVector extent( TransformationStrategy<Object> transform ) { return reference.key( transform ).subVector( 0, extentLength ); } @Override public BitVector key( TransformationStrategy<Object> transform ) { return reference.key( transform ); } public BitVector handle( TransformationStrategy<Object> transform ) { return reference.key( transform ).subVector( 0, handleLength() ); } } /** A node of the trie. */ protected final static class Leaf extends Node { /** The previous leaf. */ protected Leaf prev; /** The next leaf. */ protected Leaf next; /** The key upon which the extent of node is based, for internal nodes; the * key associated to a leaf, otherwise. */ protected CharSequence key; /** The leaf whose key this node refers to for internal nodes; the internal node that * refers to the key of this leaf, otherwise. Will be <code>null</code> for exactly one leaf. */ protected InternalNode reference; public boolean isLeaf() { return true; } public boolean isInternal() { return false; } public boolean intercepts( final long h ) { return h > parentExtentLength; } public BitVector extent( final TransformationStrategy<Object> transform ) { return key( transform ); } @Override public BitVector key( final TransformationStrategy<Object> transform ) { return transform.toBitVector( key ); } public BitVector handle( TransformationStrategy<Object> transform ) { return key( transform ).subVector( 0, handleLength() ); } } /** Creates a new z-fast trie using the given transformation strategy. * * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ @SuppressWarnings("unchecked") public ZFastTrie( final TransformationStrategy<? 
super T> transform ) { this.transform = transform; this.map = new Map( (TransformationStrategy<Object>)transform ); initHeadTail(); } private void initHeadTail() { head = new Leaf(); tail = new Leaf(); head.next = tail; tail.prev = head; } /** Creates a new z-fast trie using the given elements and transformation strategy. * * @param elements an iterator returning the elements among which the trie must be able to rank. * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ public ZFastTrie( final Iterator<? extends T> elements, final TransformationStrategy<? super T> transform ) { this( transform ); while( elements.hasNext() ) add( elements.next() ); } /** Creates a new z-fast trie using the given elements and transformation strategy. * * @param elements an iterator returning the elements among which the trie must be able to rank. * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ public ZFastTrie( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform ) { this( elements.iterator(), transform ); } public int size() { return size > Integer.MAX_VALUE ? -1 : (int)size; } /** Returns the 2-fattest number in an interval. * * <p>Note that to get the length of the handle of a node you must * call this function passing the length of the extent of the parent (one less * than the node name) and the length of the extent of the node. * * @param l left extreme (excluded). * @param r right extreme (included). * @return the 2-fattest number in (<code>l</code>..<code>r</code>]. */ private final static long twoFattest( final long l, final long r ) { return ( -1L << Fast.mostSignificantBit( l ^ r ) & r ); } private static void removeLeaf( final Leaf node ) { node.next.prev = node.prev; node.prev.next = node.next; } private static void addAfter( final Leaf pred, final Leaf node ) { node.next = pred.next; node.prev = pred; pred.next.prev = node; pred.next = node; } private static void addBefore( final Leaf succ, final Leaf node ) { node.prev = succ.prev; node.next = succ; succ.prev.next = node; succ.prev = node; } private void assertTrie() { /* Shortest key */ LongArrayBitVector root = null; /* Keeps track of which nodes in map are reachable using left/right from the root. */ ObjectOpenHashSet<Node> nodes = new ObjectOpenHashSet<Node>(); /* Keeps track of leaves. */ ObjectOpenHashSet<Leaf> leaves = new ObjectOpenHashSet<Leaf>(); /* Keeps track of reference to leaf keys in internal nodes. */ ObjectOpenHashSet<Object> references = new ObjectOpenHashSet<Object>(); assert size == 0 && map.size() == 0 || size == map.size() + 1; /* Search for the root (shortest handle) and check that nodes and handles do match. */ for( LongArrayBitVector v : map.keySet() ) { final long vHandleLength = map.get( v, false ).handleLength(); if ( root == null || map.get( root, false ).handleLength() > vHandleLength ) root = v; final InternalNode node = map.get( v, false ); nodes.add( node ); assert node.reference.reference == node : node + " -> " + node.reference + " -> " + node.reference.reference; } assert nodes.size() == map.size() : nodes.size() + " != " + map.size(); assert size < 2 || this.root == map.get( root, false ); if ( size > 1 ) { /* Verify doubly linked list of leaves. 
*/ Leaf toRight = head.next, toLeft = tail.prev; for( int i = 1; i < size; i++ ) { assert new MutableString( toRight.key ).compareTo( toRight.next.key ) < 0 : toRight.key + " >= " + toRight.next.key + " " + toRight + " " + toRight.next; assert new MutableString( toLeft.key ).compareTo( toLeft.prev.key ) > 0 : toLeft.key + " >= " + toLeft.prev.key + " " + toLeft + " " + toLeft.prev; toRight = toRight.next; toLeft = toLeft.prev; } final int numNodes = visit( map.get( root, false ), null, 0, 0, nodes, leaves, references ); assert numNodes == 2 * size - 1 : numNodes + " != " + ( 2 * size - 1 ); assert leaves.size() == size; int c = 0; for( Leaf leaf: leaves ) if ( references.contains( leaf.key ) ) c++; assert c == size - 1 : c + " != " + ( size - 1 ); } else if ( size == 1 ) { assert head.next == this.root; assert tail.prev == this.root; } assert nodes.isEmpty(); } private int visit( final Node n, final Node parent, final long parentExtentLength, final int depth, ObjectOpenHashSet<Node> nodes, ObjectOpenHashSet<Leaf> leaves, ObjectOpenHashSet<Object> references ) { if ( n == null ) return 0; if ( DEBUG ) { for( int i = depth; i-- != 0; ) System.err.print( '\t' ); System.err.println( "Node " + n + " (parent extent length: " + parentExtentLength + ")" + ( n.isInternal() ? " Jump left: " + ((InternalNode)n).jumpLeft + " Jump right: " + ((InternalNode)n).jumpRight : "" ) ); } assert parent == null || parent.extent( (TransformationStrategy<Object>)transform ).equals( n.extent( (TransformationStrategy<Object>)transform ).subVector( 0, parent.extentLength ) ); assert parentExtentLength < n.extentLength; assert n.parentExtentLength == parentExtentLength : n.parentExtentLength + " != " + parentExtentLength + " " + n; if ( n.isInternal() ) { assert references.add( ((InternalNode)n).reference.key ); assert nodes.remove( n ) : n; assert map.keySet().contains( n.handle( (TransformationStrategy<Object>)transform ) ) : n; /* Check that jumps are correct. */ final long jumpLength = n.jumpLength(); Node jumpLeft = ((InternalNode)n).left; while( jumpLeft.isInternal() && jumpLength > jumpLeft.extentLength ) jumpLeft = ((InternalNode)jumpLeft).left; assert jumpLeft == ((InternalNode)n).jumpLeft : jumpLeft + " != " + ((InternalNode)n).jumpLeft + " (node: " + n + ")"; Node jumpRight = ((InternalNode)n).right; while( jumpRight.isInternal() && jumpLength > jumpRight.extentLength ) jumpRight = ((InternalNode)jumpRight).right; assert jumpRight == ((InternalNode)n).jumpRight : jumpRight + " != " + ((InternalNode)n).jumpRight + " (node: " + n + ")"; return 1 + visit( ((InternalNode)n).left, n, n.extentLength, depth + 1, nodes, leaves, references ) + visit( ((InternalNode)n).right, n, n.extentLength, depth + 1, nodes, leaves, references ); } else { assert leaves.add( (Leaf)n ); assert n.extentLength == n.key( (TransformationStrategy<Object>)transform ).length(); return 1; } } /** Sets the jump pointers of a node by searching exhaustively for * handles that are jumps of the node handle length. * * @param node the node whose jump pointers must be set. */ private static void setJumps( final InternalNode node ) { if ( DEBUG ) System.err.println( "setJumps(" + node + ")" ); final long jumpLength = node.jumpLength(); Node jump; for( jump = node.left; jump.isInternal() && jumpLength > jump.extentLength; ) jump = ((InternalNode)jump).jumpLeft; if ( ASSERTS ) assert jump.intercepts( jumpLength ) : jumpLength + " not in " + "(" + jump.parentExtentLength + ".." 
+ jump.extentLength + "] " + jump; node.jumpLeft = jump; for( jump = node.right; jump.isInternal() && jumpLength > jump.extentLength; ) jump = ((InternalNode)jump).jumpRight; if ( ASSERTS ) assert jump.intercepts( jumpLength ) : jumpLength + " not in " + "(" + jump.parentExtentLength + ".." + jump.extentLength + "] " + jump; node.jumpRight = jump; } /** Fixes the right jumps of the ancestors of a node after an insertion. * * @param exitNode the exit node. * @param rightChild * @param leaf the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. */ private static void fixRightJumpsAfterInsertion( final InternalNode internal, Node exitNode, boolean rightChild, Leaf leaf, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixRightJumpsAfterInsertion(" + internal + ", " + exitNode + ", " + rightChild + ", " + leaf + ", " + stack ); final long lcp = leaf.parentExtentLength; InternalNode toBeFixed = null; long jumpLength = -1; if ( ! rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != exitNode ) break; if ( jumpLength <= lcp ) toBeFixed.jumpLeft = internal; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != exitNode || jumpLength > lcp ) break; toBeFixed.jumpRight = internal; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); while( exitNode.isInternal() && toBeFixed.jumpRight != exitNode ) exitNode = ((InternalNode)exitNode).jumpRight; if ( toBeFixed.jumpRight != exitNode ) return; toBeFixed.jumpRight = leaf; } } } /** Fixes the left jumps of the ancestors of a node after an insertion. * * @param exitNode the exit node. * @param rightChild * @param leaf the new leaf. * @param stack a stack containing the fat ancestors of <code>exitNode</code>. */ private static void fixLeftJumpsAfterInsertion( final InternalNode internal, Node exitNode, boolean rightChild, Leaf leaf, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixLeftJumpsAfterInsertion(" + internal + ", " + exitNode + ", " + rightChild + ", " + leaf + ", " + stack ); final long lcp = leaf.parentExtentLength; InternalNode toBeFixed = null; long jumpLength = -1; if ( rightChild ) { /* Nodes jumping to the right into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != exitNode ) break; if ( jumpLength <= lcp ) toBeFixed.jumpRight = internal; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != exitNode || jumpLength > lcp ) break; toBeFixed.jumpLeft = internal; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); while( exitNode.isInternal() && toBeFixed.jumpLeft != exitNode ) exitNode = ((InternalNode)exitNode).jumpLeft; if ( toBeFixed.jumpLeft != exitNode ) return; toBeFixed.jumpLeft = leaf; } } } /** Fixes the right jumps of the ancestors of a node after a deletion. * * @param parentExitNode the exit node. * @param rightChild * @param exitNode the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. 
*/ private static void fixRightJumpsAfterDeletion( Node otherNode, InternalNode parentExitNode, boolean rightChild, Leaf exitNode, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixRightJumpsAfterDeletion(" + otherNode + ", " + parentExitNode + ", " + rightChild + ", " + exitNode + ", " + stack ); InternalNode toBeFixed = null; long jumpLength = -1; if ( ! rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != parentExitNode ) break; toBeFixed.jumpLeft = otherNode; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != parentExitNode ) break; toBeFixed.jumpRight = otherNode; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); if ( toBeFixed.jumpRight != exitNode ) break; jumpLength = toBeFixed.jumpLength(); while( ! otherNode.intercepts( jumpLength ) ) otherNode = ((InternalNode)otherNode).jumpRight; toBeFixed.jumpRight = otherNode; } } } /** Fixes the left jumps of the ancestors of a node after a deletion. * * @param parentExitNode the exit node. * @param rightChild * @param exitNode the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. */ private static void fixLeftJumpsAfterDeletion( Node otherNode, InternalNode parentExitNode, boolean rightChild, Leaf exitNode, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixLeftJumpsAfterDeletion(" + otherNode + ", " + parentExitNode + ", " + rightChild + ", " + exitNode + ", " + stack ); InternalNode toBeFixed = null; long jumpLength = -1; if ( rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != parentExitNode ) break; toBeFixed.jumpRight = otherNode; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != parentExitNode ) break; toBeFixed.jumpLeft = otherNode; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); if ( toBeFixed.jumpLeft != exitNode ) break; jumpLength = toBeFixed.jumpLength(); while( ! otherNode.intercepts( jumpLength ) ) otherNode = ((InternalNode)otherNode).jumpLeft; toBeFixed.jumpLeft = otherNode; } } } @SuppressWarnings("unchecked") public boolean remove( final Object k ) { final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)k ) ); if ( DEBUG ) System.err.println( "remove(" + v + ")" ); if ( DEBUG ) System.err.println( "Map: " + map + " root: " + root ); if ( size == 0 ) return false; if ( size == 1 ) { if ( ! 
((Leaf)root).key.equals( k ) ) return false; root = null; size = 0; if ( ASSERTS ) assertTrie(); return true; } final ObjectArrayList<InternalNode> stack = new ObjectArrayList<InternalNode>( 64 ); InternalNode parentExitNode; boolean rightLeaf, rightChild = false; Node exitNode; long lcp; final long[] state = Hashes.preprocessMurmur( v, 0 ); ParexData parexData = getParentExitNode( v, state, stack ); parentExitNode = parexData.parexNode; rightLeaf = parentExitNode != null && parentExitNode.extentLength < v.length() && v.getBoolean( parentExitNode.extentLength ); exitNode = parexData.exitNode; lcp = parexData.lcp; if ( DDEBUG ) System.err.println( "Parex node: " + parentExitNode + " Exit node: " + exitNode + " LCP: " + lcp ); if ( ! ( exitNode.isLeaf() && ((Leaf)exitNode).key.equals( k ) ) ) return false; // Not found final Node otherNode = rightLeaf ? parentExitNode.left : parentExitNode.right; final boolean otherNodeIsInternal = otherNode.isInternal(); if ( parentExitNode != null && parentExitNode != root ) { // Let us fix grandpa's child pointer. InternalNode grandParentExitNode = getGrandParentExitNode( v, state, stack ); if ( rightChild = ( grandParentExitNode.right == parentExitNode ) ) grandParentExitNode.right = otherNode; else grandParentExitNode.left = otherNode; } final long parentExitNodehandleLength = parentExitNode.handleLength(); final long otherNodeHandleLength = otherNode.handleLength(); final long t = parentExitNodehandleLength | otherNodeHandleLength; final boolean cutLow = ( t & -t & otherNodeHandleLength ) != 0; if ( parentExitNode == root ) root = otherNode; // Fix leaf reference if not null final InternalNode refersToExitNode = ((Leaf)exitNode).reference; if ( refersToExitNode == null ) parentExitNode.reference.reference = null; else { refersToExitNode.reference = parentExitNode.reference; refersToExitNode.reference.reference = refersToExitNode; } // Fix doubly-linked list removeLeaf( (Leaf)exitNode ); if ( DDEBUG ) System.err.println( "Cut " + ( cutLow ? "low" : "high") + "; leaf on the " + ( rightLeaf ? "right" : "left") + "; other node is " + ( otherNodeIsInternal ? "internal" : "a leaf") ); if ( rightLeaf ) fixRightJumpsAfterDeletion( otherNode, parentExitNode, rightChild, (Leaf)exitNode, stack ); else fixLeftJumpsAfterDeletion( otherNode, parentExitNode, rightChild, (Leaf)exitNode, stack ); if ( cutLow && otherNodeIsInternal ) { map.remove( Hashes.murmur( otherNode.key( (TransformationStrategy<Object>)transform ), otherNodeHandleLength, state, parentExitNode.extentLength ), (InternalNode)otherNode, otherNodeHandleLength ); otherNode.parentExtentLength = parentExitNode.parentExtentLength; map.replace( Hashes.murmur( v, parentExitNodehandleLength, state ), (InternalNode)otherNode ); setJumps( (InternalNode)otherNode ); } else { otherNode.parentExtentLength = parentExitNode.parentExtentLength; map.remove( Hashes.murmur( v, parentExitNodehandleLength, state ), parentExitNode, parentExitNodehandleLength ); } size--; if ( ASSERTS ) { assertTrie(); assert ! 
contains( k ); } return true; } @Override public boolean add( final T k ) { if ( DEBUG ) System.err.println( "add(" + k + ")" ); final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( k ) ); if ( DEBUG ) System.err.println( "add(" + v + ")" ); if ( DEBUG ) System.err.println( "Map: " + map + " root: " + root ); if ( size == 0 ) { root = new Leaf(); ((Leaf)root).key = (CharSequence)k; root.parentExtentLength = 0; root.extentLength = v.length(); ((Leaf)root).reference = null; addAfter( head, (Leaf)root ); size++; if ( ASSERTS ) assertTrie(); return true; } final ObjectArrayList<InternalNode> stack = new ObjectArrayList<InternalNode>( 64 ); InternalNode parentExitNode; boolean rightChild; Node exitNode; long lcp; final long[] state = Hashes.preprocessMurmur( v, 0 ); ParexData parexData = getParentExitNode( v, state, stack ); parentExitNode = parexData.parexNode; exitNode = parexData.exitNode; lcp = parexData.lcp; rightChild = parentExitNode != null && parentExitNode.extentLength < v.length() && v.getBoolean( parentExitNode.extentLength ); if ( DDEBUG ) System.err.println( "Parex node: " + parentExitNode + " Exit node: " + exitNode + " LCP: " + lcp ); if ( exitNode.isLeaf() && ((Leaf)exitNode).key.equals( k ) ) return false; // Already there final boolean exitDirection = v.getBoolean( lcp ); final long exitNodeHandleLength = exitNode.handleLength(); final boolean cutLow = lcp >= exitNodeHandleLength; Leaf leaf = new Leaf(); InternalNode internal = new InternalNode(); final boolean exitNodeIsInternal = exitNode.isInternal(); leaf.key = (CharSequence)k; leaf.parentExtentLength = lcp; leaf.extentLength = v.length(); leaf.reference = internal; internal.reference = leaf; internal.parentExtentLength = exitNode.parentExtentLength; internal.extentLength = lcp; if ( exitDirection ) { internal.jumpRight = internal.right = leaf; internal.left = exitNode; internal.jumpLeft = cutLow && exitNodeIsInternal ? ((InternalNode)exitNode).jumpLeft : exitNode; } else { internal.jumpLeft = internal.left = leaf; internal.right = exitNode; internal.jumpRight = cutLow && exitNodeIsInternal ? ((InternalNode)exitNode).jumpRight : exitNode; } if ( exitNode == root ) root = internal; // Update root else { if ( rightChild ) parentExitNode.right = internal; else parentExitNode.left = internal; } if ( DDEBUG ) System.err.println( "Cut " + ( cutLow ? "low" : "high") + "; exit to the " + ( exitDirection ? "right" : "left") ); if ( exitDirection ) fixRightJumpsAfterInsertion( internal, exitNode, rightChild, leaf, stack ); else fixLeftJumpsAfterInsertion( internal, exitNode, rightChild, leaf, stack ); if ( cutLow && exitNodeIsInternal ) { map.replace( Hashes.murmur( v, exitNodeHandleLength, state ), internal ); exitNode.parentExtentLength = lcp; map.addNew( Hashes.murmur( exitNode.key( (TransformationStrategy<Object>)transform ), exitNode.handleLength(), state, lcp ), (InternalNode)exitNode ); setJumps( (InternalNode)exitNode ); } else { exitNode.parentExtentLength = lcp; map.addNew( Hashes.murmur( v, internal.handleLength(), state ), internal ); } if ( DEBUG ) System.err.println( "After insertion, map: " + map + " root: " + root ); size++; /* We find a predecessor or successor to insert the new leaf in the doubly linked list. 
*/ if ( exitDirection ) { while( exitNode.isInternal() ) exitNode = ((InternalNode)exitNode).jumpRight; addAfter( (Leaf)exitNode, leaf ); } else { while( exitNode.isInternal() ) exitNode = ((InternalNode)exitNode).jumpLeft; addBefore( (Leaf)exitNode, leaf ); } if ( ASSERTS ) assertTrie(); if ( ASSERTS ) assert contains( k ); return true; } /** Returns the exit node of a given bit vector. * * @param v a bit vector. * @return the exit node of <code>v</code>. */ private Node getExitNode( final LongArrayBitVector v, final long[] state ) { if ( size == 0 ) throw new IllegalStateException(); if ( size == 1 ) return root; if ( DDEBUG ) System.err.println( "getExitNode(" + v + ")" ); final long length = v.length(); // This can be the exit node of v, the parex node of v, or something completely wrong. InternalNode parexOrExitNode = fatBinarySearch( v, state, null, false, 0, length ); // This will contain the exit node if parexOrExitNode contains the correct parex node. Node candidateExitNode; if ( parexOrExitNode == null ) candidateExitNode = root; else candidateExitNode = parexOrExitNode.extentLength < length && v.getBoolean( parexOrExitNode.extentLength ) ? parexOrExitNode.right : parexOrExitNode.left; /* This lcp length makes it possible to compute the length of the lcp between v and * parexOrExitNode by minimisation with the extent length, as necessarily the extent of * candidateExitNode is an extension of the extent of parexOrExitNode. */ final long lcpLength = v.longestCommonPrefixLength( candidateExitNode.extent( (TransformationStrategy<Object>)transform ) ); // In this case the fat binary search gave us the correct parex node. if ( candidateExitNode.isExitNodeOf( length, lcpLength ) ) return candidateExitNode; // In this case the fat binary search gave us the correct exit node. if ( parexOrExitNode.isExitNodeOf( length, Math.min( parexOrExitNode.extentLength, lcpLength ) ) ) return parexOrExitNode; // Otherwise, something went horribly wrong. We restart in exact mode. parexOrExitNode = fatBinarySearch( v, state, null, true, 0, length ); if ( parexOrExitNode == null ) return root; // TODO: In principle we can check just the compacted path. return parexOrExitNode.extent( (TransformationStrategy<Object>)transform ).isProperPrefix( v ) ? parexOrExitNode.extentLength < length && v.getBoolean( parexOrExitNode.extentLength ) ? parexOrExitNode.right : parexOrExitNode.left : parexOrExitNode; } protected final static class ParexData { long lcp; InternalNode parexNode; Node exitNode; protected ParexData( final InternalNode parexNode, final Node exitNode, final long lcp ) { this.lcp = lcp; this.parexNode = parexNode; this.exitNode = exitNode; } } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root. */ public ParexData getParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack ) { if ( size == 0 ) throw new IllegalStateException(); if ( size == 1 ) return new ParexData( null, root, v.longestCommonPrefixLength( root.extent( (TransformationStrategy<Object>)transform ) ) ); final long length = v.length(); // This can be the exit node of v, the parex node of v, or something completely wrong. 
InternalNode parexOrExitNode = fatBinarySearch( v, state, null, false, 0, length ); // This will contain the exit node if parexOrExitNode contains the correct parex node. Node candidateExitNode; if ( parexOrExitNode == null ) candidateExitNode = root; else candidateExitNode = parexOrExitNode.extentLength < length && v.getBoolean( parexOrExitNode.extentLength ) ? parexOrExitNode.right : parexOrExitNode.left; /* This lcp length makes it possible to compute the length of the lcp between v and * parexOrExitNode by minimisation with the extent length, as necessarily the extent of * candidateExitNode is an extension of the extent of parexOrExitNode. */ long lcpLength = v.longestCommonPrefixLength( candidateExitNode.extent( (TransformationStrategy<Object>)transform ) ); System.err.println ( "++++++++" + lcpLength ); // In this case the fat binary search gave us the correct parex node, and we have all the data we need. if ( candidateExitNode.isExitNodeOf( length, lcpLength ) ) return new ParexData( parexOrExitNode, candidateExitNode, lcpLength ); // In this case the fat binary search gave us the correct *exit* node. We must pop it from the stack and maybe restart the search. lcpLength = Math.min( parexOrExitNode.extentLength, lcpLength ); System.err.println ( "********" ); if ( parexOrExitNode.isExitNodeOf( length, lcpLength ) ) { stack.pop(); // We're lucky: the second element on the stack is the parex node. if ( stack.top().extentLength == parexOrExitNode.parentExtentLength ) return new ParexData( stack.top(), parexOrExitNode, lcpLength ); final long startingPoint = stack.isEmpty() ? 0 : stack.top().extentLength; final int stackSize = stack.size(); // Unless there are mistakes, this is really the parex node. InternalNode parexNode = fatBinarySearch( v, state, stack, false, startingPoint, parexOrExitNode.parentExtentLength ); // Something went wrong with the last search. We can just, at this point, restart in exact mode. // TODO: check length? if ( ! v.equals( parexNode.extent( (TransformationStrategy<Object>)transform ), startingPoint, parexNode.extentLength ) ) { stack.size( stackSize ); parexNode = fatBinarySearch( v, state, stack, true, startingPoint, parexOrExitNode.parentExtentLength ); } return new ParexData( parexNode, parexOrExitNode, lcpLength ); } throw new IllegalStateException(); } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @param exact if true, the map defining the trie will be accessed in exact mode. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root; * if <code>exact</code> is false, with low probability * the result might be wrong. */ public InternalNode getGrandParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack, final boolean exact ) { final InternalNode parentExitNode = stack.pop(); if ( stack.isEmpty() ) return fatBinarySearch( v, state, stack, exact, 0, parentExitNode.parentExtentLength ); if ( stack.top().extentLength == parentExitNode.parentExtentLength ) return stack.top(); return fatBinarySearch( v, state, stack, exact, stack.top().extentLength, parentExitNode.parentExtentLength ); } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. 
* @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root; * if <code>exact</code> is false, with low probability * the result might be wrong. */ public InternalNode getGrandParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack ) { // TODO: make it work with non-exact search return getGrandParentExitNode( v, state, stack, true ); /*stack.clear(); InternalNode parentExitNode = getParentExitNode( v, state, null ); return getParentExitNode( v.copy( 0, parentExitNode.extentLength ), state, stack );*/ } private InternalNode fatBinarySearch( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack, final boolean exact, long a, long b ) { InternalNode node = null, top = stack == null || stack.isEmpty() ? null : stack.top(); //System.err.println( "Fat binary " + v + " " + stack + " (" + l + ".." + r + ") " + exact ); final int logLength = Fast.mostSignificantBit( b ); while( b - a > 0 ) { if ( ASSERTS ) assert logLength > -1; if ( DDEBUG ) System.err.println( "(" + a + ".." + b + "]" ); final long f = twoFattest( a, b ); if ( DDEBUG ) System.err.println( "Inquiring with key " + v.subVector( 0, f ) + " (" + f + ")" ); node = map.get( v, f, Hashes.murmur( v, f, state ), exact ); final long g; // Note that this test is just to catch false positives if ( node == null || ( g = node.extentLength ) < f ) { if ( DDEBUG ) System.err.println( "Missing" ); b = f - 1; } else { if ( DDEBUG ) System.err.println( "Found extent of length " + g ); if ( stack != null ) stack.push( node ); top = node; a = g; } } /* if ( parent == null || secondRound || equals( parent.reference.key, v, lastHandleLength, parent.extentLength ) ) break; secondRound = true; //System.err.println( "** " + v ); if ( stack != null ) stack.pop(); r = parent.parentExtentLength + 1; if ( last == null ) { l = 0; parent = null; } else { parent = last; l = parent.extentLength; if ( l == r ) break; } last = null; //System.err.println( "Restarting with (" + l + ".." + r + ")" ); //System.err.println( map ); }*/ if ( DDEBUG ) System.err.println( "Final length " + a + " node: " + top ); if ( false && ASSERTS ) { boolean rightChild; Node exitNode; long lcp; rightChild = top != null && top.extentLength < v.length() && v.getBoolean( top.extentLength ); exitNode = top == null ? root : ( rightChild ? top.right : top.left ); lcp = exitNode.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ); if ( exitNode.intercepts( lcp ) ) { // We can do asserts only if the result is correct /* If parent is null, the extent of the root must not be a prefix of v. */ if ( top == null ) assert root.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ) < root.extentLength; else { assert top.extentLength == a; /* If parent is not null, the extent of the parent must be a prefix of v, * and the extent of the exit node must be either v, or not a prefix of v. */ assert ! exact || top.extent( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ) == top.extentLength; if ( stack != null ) { /** We check that the stack contains exactly all handles that are backjumps * of the length of the extent of the parent. 
*/ a = top.extentLength; while( a != 0 ) { final Node t = map.get( top.key( (TransformationStrategy<Object>)transform ).subVector( 0, a ), true ); if ( t != null ) assert stack.contains( t ); a ^= ( a & -a ); } /** We check that the stack contains the nodes you would obtain by searching from * the top for nodes to fix. */ long left = 0; for( int i = 0; i < stack.size(); i++ ) { assert stack.get( i ).handleLength() == twoFattest( left, top.extentLength ) : stack.get( i ).handleLength() + " != " + twoFattest( left, top.extentLength ) + " " + i + " " + stack ; left = stack.get( i ).extentLength; } } } } } return top; } @SuppressWarnings("unchecked") public boolean contains( final Object o ) { if ( DEBUG ) System.err.println( "contains(" + o + ")" ); if ( size == 0 ) return false; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); final Node exitNode = getExitNode( v, state ); return exitNode.isLeaf() && ((Leaf)exitNode).key.equals( o ); } @SuppressWarnings("unchecked") public CharSequence pred( final Object o ) { if ( size == 0 ) return null; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); Node exitNode = getExitNode( v, state ); if ( v.compareTo( exitNode.extent( (TransformationStrategy<Object>)transform ) ) <= 0 ) { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpRight != null ) exitNode = ((InternalNode)exitNode).jumpRight; return ((Leaf)exitNode).key; } else { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpLeft != null ) exitNode = ((InternalNode)exitNode).jumpLeft; return ((Leaf)exitNode).prev.key; } } @SuppressWarnings("unchecked") public CharSequence succ( final Object o ) { if ( size == 0 ) return null; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); Node exitNode = getExitNode( v, state ); if ( v.compareTo( exitNode.extent( (TransformationStrategy<Object>)transform ) ) <= 0 ) { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpLeft != null ) exitNode = ((InternalNode)exitNode).jumpLeft; return ((Leaf)exitNode).key; } else { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpRight != null ) exitNode = ((InternalNode)exitNode).jumpRight; return ((Leaf)exitNode).next.key; } } private void writeObject( final ObjectOutputStream s ) throws IOException { s.defaultWriteObject(); if ( size > 0 ) writeNode( root, (TransformationStrategy<Object>)transform, s ); } private static void writeNode( final Node node, final TransformationStrategy<Object> transform, final ObjectOutputStream s ) throws IOException { s.writeBoolean( node.isInternal() ); s.writeLong( node.extentLength - node.parentExtentLength ); if ( node.isInternal() ) { writeNode( ((InternalNode)node).left, transform, s ); writeNode( ((InternalNode)node).right, transform, s ); } else s.writeObject( ((Leaf)node).key ); } private void readObject( final ObjectInputStream s ) throws IOException, ClassNotFoundException { s.defaultReadObject(); initHeadTail(); map = new Map( size, (TransformationStrategy<Object>)transform ); if ( size > 0 ) root = readNode( s, 0, 0, map, new ObjectArrayList<Leaf>(), new ObjectArrayList<InternalNode>(), new IntArrayList(), new IntArrayList(), new BooleanArrayList() ); if ( ASSERTS ) assertTrie(); } /** Reads recursively a node of the trie. * * @param s the object input stream. 
* @param depth the depth of the node to be read. * @param parentExtentLength the length of the extent of the parent node. * @param map the map representing the trie. * @param leafStack a stack that cumulates leaves as they are found: internal nodes extract references from this stack when their visit is completed. * @param jumpStack a stack that cumulates nodes that need jump pointer fixes. * @param depthStack a stack parallel to <code>jumpStack</code>, providing the depth of the corresponding node. * @param segmentStack a stack of integers representing the length of maximal constant subsequences of the string of directions taken up to the current node; for instance, if we reached the current node by 1/1/0/0/0/1/0/0, the stack will contain 2,3,1,2. * @param dirStack a stack parallel to <code>segmentStack</code>: for each element, whether it counts left or right turns. * @return the subtree rooted at the next node in the stream. */ private Node readNode( final ObjectInputStream s, final int depth, final long parentExtentLength, final Map map, final ObjectArrayList<Leaf> leafStack, final ObjectArrayList<InternalNode> jumpStack, final IntArrayList depthStack, final IntArrayList segmentStack, final BooleanArrayList dirStack ) throws IOException, ClassNotFoundException { final boolean isInternal = s.readBoolean(); final long pathLength = s.readLong(); final Node node = isInternal ? new InternalNode() : new Leaf(); node.parentExtentLength = parentExtentLength; node.extentLength = parentExtentLength + pathLength; if ( ! dirStack.isEmpty() ) { /* We cannot fix the jumps of nodes that are more than this number of levels up in the tree. */ final int maxDepthDelta = segmentStack.topInt(); final boolean dir = dirStack.topBoolean(); InternalNode anc; int d; long jumpLength; do { jumpLength = ( anc = jumpStack.top() ).jumpLength(); d = depthStack.topInt(); /* To be fixable, a node must be within the depth limit, and we must intercept its jump length (note that * we cannot use .intercept() as the state of node is not yet consistent). If a node cannot be fixed, no * node higher in the stack can. */ if ( depth - d <= maxDepthDelta && jumpLength > parentExtentLength && ( ! isInternal || jumpLength <= node.extentLength ) ) { //if ( DDEBUG ) System.err.println( "Setting " + ( dir ? "right" : "left" ) + " jump pointer of " + anc + " to " + node ); if ( dir ) anc.jumpRight = node; else anc.jumpLeft = node; jumpStack.pop(); depthStack.popInt(); } else break; } while( ! jumpStack.isEmpty() ); } if ( isInternal ) { if ( dirStack.isEmpty() || dirStack.topBoolean() != false ) { segmentStack.push( 1 ); dirStack.push( false ); } else segmentStack.push( segmentStack.popInt() + 1 ); jumpStack.push( (InternalNode)node ); depthStack.push( depth ); if ( DEBUG ) System.err.println( "Recursing into left node... " ); ((InternalNode)node).left = readNode( s, depth + 1, node.extentLength, map, leafStack, jumpStack, depthStack, segmentStack, dirStack ); int top = segmentStack.popInt(); if ( top != 1 ) segmentStack.push( top - 1 ); else dirStack.popBoolean(); if ( dirStack.isEmpty() || dirStack.topBoolean() != true ) { segmentStack.push( 1 ); dirStack.push( true ); } else segmentStack.push( segmentStack.popInt() + 1 ); jumpStack.push( (InternalNode)node ); depthStack.push( depth ); if ( DEBUG ) System.err.println( "Recursing into right node... 
" ); ((InternalNode)node).right = readNode( s, depth + 1, node.extentLength, map, leafStack, jumpStack, depthStack, segmentStack, dirStack ); top = segmentStack.popInt(); if ( top != 1 ) segmentStack.push( top - 1 ); else dirStack.popBoolean(); /* We assign the reference leaf, and store the associated key. */ final Leaf referenceLeaf = leafStack.pop(); ((InternalNode)node).reference = referenceLeaf; referenceLeaf.reference = (InternalNode)node; map.addNew( (InternalNode)node ); if ( ASSERTS ) { // Check jump pointers. Node t; t = ((InternalNode)node).left; while( t.isInternal() && ! t.intercepts( node.jumpLength() ) ) t = ((InternalNode)t).left; assert ((InternalNode)node).jumpLeft == t : ((InternalNode)node).jumpLeft + " != " + t + " (" + node + ")"; t = ((InternalNode)node).right; while( t.isInternal() && ! t.intercepts( node.jumpLength() ) ) t = ((InternalNode)t).right; assert ((InternalNode)node).jumpRight == t : ((InternalNode)node).jumpRight + " != " + t + " (" + node + ")"; } } else { ((Leaf)node).key = (CharSequence)s.readObject(); leafStack.push( (Leaf)node ); addBefore( tail, (Leaf)node ); } return node; } public static void main( final String[] arg ) throws NoSuchMethodException, IOException, JSAPException { final SimpleJSAP jsap = new SimpleJSAP( ZFastTrie.class.getName(), "Builds an PaCo trie-based monotone minimal perfect hash function reading a newline-separated list of strings.", new Parameter[] { new FlaggedOption( "encoding", ForNameStringParser.getParser( Charset.class ), "UTF-8", JSAP.NOT_REQUIRED, 'e', "encoding", "The string file encoding." ), new Switch( "iso", 'i', "iso", "Use ISO-8859-1 coding internally (i.e., just use the lower eight bits of each character)." ), new Switch( "bitVector", 'b', "bit-vector", "Build a trie of bit vectors, rather than a trie of strings." ), new Switch( "zipped", 'z', "zipped", "The string list is compressed in gzip format." ), new UnflaggedOption( "trie", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, JSAP.NOT_GREEDY, "The filename for the serialised z-fast trie." ), new UnflaggedOption( "stringFile", JSAP.STRING_PARSER, "-", JSAP.NOT_REQUIRED, JSAP.NOT_GREEDY, "The name of a file containing a newline-separated list of strings, or - for standard input." ), }); JSAPResult jsapResult = jsap.parse( arg ); if ( jsap.messagePrinted() ) return; final String functionName = jsapResult.getString( "trie" ); final String stringFile = jsapResult.getString( "stringFile" ); final Charset encoding = (Charset)jsapResult.getObject( "encoding" ); final boolean zipped = jsapResult.getBoolean( "zipped" ); final boolean iso = jsapResult.getBoolean( "iso" ); final boolean bitVector = jsapResult.getBoolean( "bitVector" ); final InputStream inputStream = "-".equals( stringFile ) ? System.in : new FileInputStream( stringFile ); final LineIterator lineIterator = new LineIterator( new FastBufferedReader( new InputStreamReader( zipped ? new GZIPInputStream( inputStream ) : inputStream, encoding ) ) ); final TransformationStrategy<CharSequence> transformationStrategy = iso ? TransformationStrategies.prefixFreeIso() : TransformationStrategies.prefixFreeUtf16(); ProgressLogger pl = new ProgressLogger(); pl.itemsName = "keys"; pl.displayFreeMemory = true; pl.start( "Adding keys..." 
); if ( bitVector ) { ZFastTrie<LongArrayBitVector> zFastTrie = new ZFastTrie<LongArrayBitVector>( TransformationStrategies.identity() ); while( lineIterator.hasNext() ) { zFastTrie.add( LongArrayBitVector.copy( transformationStrategy.toBitVector( lineIterator.next().copy() ) ) ); pl.lightUpdate(); } pl.done(); BinIO.storeObject( zFastTrie, functionName ); } else { ZFastTrie<CharSequence> zFastTrie = new ZFastTrie<CharSequence>( transformationStrategy ); while( lineIterator.hasNext() ) { zFastTrie.add( lineIterator.next().copy() ); pl.lightUpdate(); } pl.done(); BinIO.storeObject( zFastTrie, functionName ); } LOGGER.info( "Completed." ); } @Override public ObjectBidirectionalIterator<T> iterator() { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> headSet( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public ObjectBidirectionalIterator<T> iterator( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> subSet( T arg0, T arg1 ) { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> tailSet( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public Comparator<? super T> comparator() { // TODO Auto-generated method stub return null; } @Override public T first() { // TODO Auto-generated method stub return null; } @Override public T last() { // TODO Auto-generated method stub return null; } }
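
/* A minimal programmatic sketch of what the command-line main() above does, restricted to calls that
 * are visible in this file. The class name, the keys and the output file name "trie.obj" are made-up
 * examples, and fully qualified names are used so the sketch needs no additional imports. */
class ZFastTrieBuildSketch {
	public static void main( final String[] unused ) throws java.io.IOException {
		// Keys are turned into prefix-free UTF-16 bit vectors, as main() does when --iso is not given.
		final ZFastTrie<CharSequence> trie = new ZFastTrie<CharSequence>( it.unimi.dsi.bits.TransformationStrategies.prefixFreeUtf16() );
		trie.add( "bar" );
		trie.add( "foo" );
		// Serialise the trie, as main() does for the file name given on the command line.
		it.unimi.dsi.fastutil.io.BinIO.storeObject( trie, "trie.obj" );
	}
}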
src/it/unimi/dsi/sux4j/util/ZFastTrie.java
package it.unimi.dsi.sux4j.util; /* * Sux4J: Succinct data structures for Java * * Copyright (C) 2010 Sebastiano Vigna * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 3 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, see <http://www.gnu.org/licenses/>. * */ import it.unimi.dsi.Util; import it.unimi.dsi.bits.BitVector; import it.unimi.dsi.bits.BitVectors; import it.unimi.dsi.bits.Fast; import it.unimi.dsi.bits.LongArrayBitVector; import it.unimi.dsi.bits.TransformationStrategies; import it.unimi.dsi.bits.TransformationStrategy; import it.unimi.dsi.fastutil.booleans.BooleanArrayList; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntIterator; import it.unimi.dsi.fastutil.ints.IntOpenHashSet; import it.unimi.dsi.fastutil.io.BinIO; import it.unimi.dsi.fastutil.longs.LongOpenHashSet; import it.unimi.dsi.fastutil.objects.AbstractObjectIterator; import it.unimi.dsi.fastutil.objects.AbstractObjectSet; import it.unimi.dsi.fastutil.objects.AbstractObjectSortedSet; import it.unimi.dsi.fastutil.objects.ObjectArrayList; import it.unimi.dsi.fastutil.objects.ObjectBidirectionalIterator; import it.unimi.dsi.fastutil.objects.ObjectIterator; import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet; import it.unimi.dsi.fastutil.objects.ObjectSet; import it.unimi.dsi.fastutil.objects.ObjectSortedSet; import it.unimi.dsi.io.FastBufferedReader; import it.unimi.dsi.io.LineIterator; import it.unimi.dsi.lang.MutableString; import it.unimi.dsi.logging.ProgressLogger; import it.unimi.dsi.sux4j.mph.Hashes; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.nio.charset.Charset; import java.util.Comparator; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.zip.GZIPInputStream; import org.apache.log4j.Logger; import com.martiansoftware.jsap.FlaggedOption; import com.martiansoftware.jsap.JSAP; import com.martiansoftware.jsap.JSAPException; import com.martiansoftware.jsap.JSAPResult; import com.martiansoftware.jsap.Parameter; import com.martiansoftware.jsap.SimpleJSAP; import com.martiansoftware.jsap.Switch; import com.martiansoftware.jsap.UnflaggedOption; import com.martiansoftware.jsap.stringparsers.ForNameStringParser; /** A z-fast trie, that is, a predecessor/successor data structure using low linear (in the number of keys) additional space and * answering to the query string * <var>x</var> in time |<var>x</var>|/<var>w</var> + log(max{|<var>x</var>|, |<var>x</var><sup>-</sup>|, |<var>x</var><sup>+</sup>|}) with high probability, * where <var>w</var> is the machine word size, and <var>x</var><sup>-</sup>/<var>x</var><sup>+</sup> are the predecessor/successor of <var>x</var> in the currently stored set, respectively. 
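 *
 * <p>A minimal usage sketch (the keys below and the choice of <code>TransformationStrategies.prefixFreeIso()</code> are
 * illustrative examples; any strategy turning distinct elements into distinct, prefix-free bit vectors can be used):
 * <pre>
 * ZFastTrie&lt;CharSequence&gt; t = new ZFastTrie&lt;CharSequence&gt;( TransformationStrategies.prefixFreeIso() );
 * t.add( "bar" );
 * t.add( "foo" );
 * t.contains( "foo" );   // true
 * t.contains( "baz" );   // false
 * </pre>
 * Predecessor/successor queries on the stored set are available via {@link #pred(Object)} and {@link #succ(Object)}.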
* * <p>In rough terms, the z-fast trie uses time |<var>x</var>|/<var>w</var> (which is optimal) to actually look at the string content, * and log(max{|<var>x</var>|, |<var>x</var><sup>-</sup>|, |<var>x</var><sup>+</sup>|}) to perform the search. This is known to be (essentially) optimal. * String lengths are up to {@link Integer#MAX_VALUE}, and not limited to be a constant multiple of <var>w</var> for the bounds to hold. * * <p>The linear overhead of a z-fast trie is very low. For <var>n</var> keys we allocate 2<var>n</var> &minus; 1 nodes containing six references and * two longs, plus a dictionary containing <var>n</var> &minus; 1 nodes (thus using around 2<var>n</var> references and 2<var>n</var> longs). * */ public class ZFastTrie<T> extends AbstractObjectSortedSet<T> implements Serializable { public static final long serialVersionUID = 1L; private static final Logger LOGGER = Util.getLogger( ZFastTrie.class ); private static final boolean ASSERTS = false; private static final boolean SHORT_SIGNATURES = false; private static final boolean DEBUG = false; private static final boolean DDEBUG = DEBUG; /** The number of elements in the trie. */ private int size; /** The root node. */ private transient Node root; /** The transformation strategy. */ private final TransformationStrategy<? super T> transform; /** A dictionary mapping handles to the corresponding internal nodes. */ public transient Map map; /** The head of the doubly linked list of leaves. */ private transient Leaf head; /** The tail of the doubly linked list of leaves. */ private transient Leaf tail; /** A linear-probing hash map that compares keys using signatures as a first try. */ public final static class Map { private static final long serialVersionUID = 1L; private static final int INITIAL_LENGTH = 64; /** The transformation strategy. */ private final TransformationStrategy<Object> transform; /** The node table. */ private InternalNode[] node; /** The signature of the handle of the corresponding entry {@link #node}. */ private long[] signature; /** An array parallel to {@link #node} specifying whether a signature is a duplicate. * If true, there are more copies of the signature along the search path. */ private boolean dup[]; /** The number of elements in the table. */ private int size; /** The number of slots in the table (always a power of two). */ private int length; /** {@link #length} &minus; 1. 
*/ private int mask; private void assertTable() { for( int i = signature.length; i-- != 0; ) if ( node[ i ] != null ) assert get( node[ i ].handle( transform ), true ) == node[ i ]; if ( size == 0 ) return; final IntOpenHashSet overallHashes = new IntOpenHashSet(); int start = 0; int first = -1; while( node[ start ] != null ) start = ( start + 1 ) & mask; // We are on an empty entry for( ;; ) { while( node[ start ] == null ) start = ( start + 1 ) & mask; // We are on a nonempty entry if ( first == -1 ) first = start; else if ( first == start ) break; int end = start; while( node[ end ] != null ) end = ( end + 1 ) & mask; // [start..end) is a maximal nonempty subsequence LongOpenHashSet signaturesSeen = new LongOpenHashSet(); IntOpenHashSet hashesSeen = new IntOpenHashSet(); for( int pos = end; pos != start; ) { pos = ( pos - 1 ) & mask; assert signaturesSeen.add( signature[ pos ] ) ^ dup[ pos ]; hashesSeen.add( hash( signature[ pos ] ) ); } // Hashes in each maximal nonempty subsequence must be disjoint for( IntIterator iterator = hashesSeen.iterator(); iterator.hasNext(); ) assert overallHashes.add( iterator.nextInt() ); start = end; } } public Map( final int size, TransformationStrategy<Object> transform ) { this.transform = transform; length = Math.max( INITIAL_LENGTH, 1 << Fast.ceilLog2( 1 + ( 3L * size / 2 ) ) ); mask = length - 1; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } public Map( TransformationStrategy<Object> transform ) { this.transform = transform; length = INITIAL_LENGTH; mask = length - 1; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } //public transient long probes = 0; //public transient long scans = 0; /** Generates a hash table position starting from the signature. * * @param s a signature. */ private int hash( final long s ) { return (int)( s ^ s >>> 32 ) & mask; } /** Find the position in the table of a given handle using signatures. * * <p>Note that this function just compares signatures (except for duplicates, which are * checked explicitly). Thus, it might return false positives. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return the position in the table where the specified handle can be found, or <code>null</code>. */ private int findPos( final long s, final BitVector v, final long handleLength ) { int pos = hash( s ); while( node[ pos ] != null ) { if ( signature[ pos ] == s && ( ! dup[ pos ] || handleLength == node[ pos ].handleLength() && v.longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) ) break; pos = ( pos + 1 ) & mask; } return pos; } /** Find the position in the table of a given handle using handles. * * <p>Note that this function compares handles. Thus, it always returns a correct value. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return the position in the table where the specified handle can be found, or <code>null</code>. 
*/ private int findExactPos( final long s, final BitVector v, final long handleLength ) { int pos = hash( s ); while( node[ pos ] != null ) { if ( signature[ pos ] == s && handleLength == node[ pos ].handleLength() && v.longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) break; pos = ( pos + 1 ) & mask; } return pos; } public void clear() { length = INITIAL_LENGTH; mask = length - 1; size = 0; signature = new long[ length ]; node = new InternalNode[ length ]; dup = new boolean[ length ]; } public ObjectSet<LongArrayBitVector> keySet() { return new AbstractObjectSet<LongArrayBitVector>() { @Override public ObjectIterator<LongArrayBitVector> iterator() { return new AbstractObjectIterator<LongArrayBitVector>() { private int i = 0; private int pos = -1; @Override public boolean hasNext() { return i < size; } @Override public LongArrayBitVector next() { if ( ! hasNext() ) throw new NoSuchElementException(); while( node[ ++pos ] == null ); i++; return LongArrayBitVector.copy( node[ pos ].handle( transform ) ); } }; } @Override public boolean contains( Object o ) { BitVector v = (BitVector)o; return get( v, true ) != null; } @Override public int size() { return size; } }; } public ObjectSet<Node> values() { return new AbstractObjectSet<Node>() { @Override public ObjectIterator<Node> iterator() { return new AbstractObjectIterator<Node>() { private int i = 0; private int pos = -1; @Override public boolean hasNext() { return i < size; } @Override public Node next() { if ( ! hasNext() ) throw new NoSuchElementException(); while( node[ ++pos ] == null ); i++; return node[ pos ]; } }; } @Override public boolean contains( Object o ) { final Node node = (Node)o; return get( node.handle( transform ), true ) != null; } @Override public int size() { return size; } }; } /** Replaces an entry with a given node. * * @param newNode a node with a handle already existing in the table; the corresponding * node will be replaced. * @see #replace(long, InternalNode) */ public void replace( final InternalNode newNode ) { replace( newNode.handleHash( transform ), newNode ); } /** Replaces an entry with a given node. * * <p>Note that as long as the handle of <code>newNode</code> is actually in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of <code>newNode</code>. * @param newNode a node with a handle already appearing in the table; the corresponding * node will be replaced. */ public void replace( long s, final InternalNode newNode ) { if ( SHORT_SIGNATURES ) s &= 0x3; final int pos = findPos( s, newNode.reference.key( transform ), newNode.handleLength() ); if ( ASSERTS ) assert node[ pos ] != null; if ( ASSERTS ) assert node[ pos ].handle( transform ).equals( newNode.handle( transform ) ) : node[ pos ].handle( transform ) + " != " + newNode.handle( transform ); node[ pos ] = newNode; if ( ASSERTS ) assertTable(); } /** Removes an existing entry from the table. * * <p>Note that as long as the given handle is actually in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of the prefix of <code>v</code> of <code>handleLength</code> bits. * @param v a bit vector. * @param handleLength the length of the prefix of <code>v</code> that will be used as a handle. * @return true if some key was removed, but if the given handle was not in the table another handle * with the same signature might have been removed instead. 
*/ public boolean remove( long s, final InternalNode v, final long handleLength ) { if ( DEBUG ) System.err.println( "Map.remove(" + s + ", " + v + ", " + handleLength + ")" ); if ( SHORT_SIGNATURES ) s &= 0x3; final int hash = hash( s ); int pos = hash; int lastDup = -1; // Keeps track of the last duplicate entry with the same signature. while( node[ pos ] != null ) { if ( signature[ pos ] == s ) { if ( ! dup[ pos ] || handleLength == node[ pos ].handleLength() && v.reference.key( transform ).longestCommonPrefixLength( node[ pos ].reference.key( transform ) ) >= handleLength ) break; else lastDup = pos; } pos = ( pos + 1 ) & mask; } // This should NOT happen, but let's return a sensible value anyway. if ( node[ pos ] == null ) return false; if ( ! dup[ pos ] && lastDup != -1 ) dup[ lastDup ] = false; // We are removing the only non-duplicate entry. // Move entries, compatibly with their hash code, to fill the hole. int candidateHole, h; do { candidateHole = pos; // Find candidate for a move (possibly empty). do { pos = ( pos + 1 ) & mask; if ( node[ pos ] == null ) break; h = hash( node[ pos ].handleHash( transform ) ); /* The hash h must lie cyclically between candidateHole and pos: more precisely, h must be after candidateHole * but before the first free entry in the table (which is equivalent to the previous statement). */ } while( candidateHole <= pos ? candidateHole < h && h <= pos : candidateHole < h || h <= pos ); node[ candidateHole ] = node[ pos ]; signature[ candidateHole ] = signature[ pos ]; dup[ candidateHole ] = dup[ pos ]; } while( node[ pos ] != null ); size--; if ( ASSERTS ) assertTable(); return true; } /** Adds a new entry to the table. * * @param v a node. * * @see #addNew(long, InternalNode) */ public void addNew( final InternalNode v ) { addNew( v.handleHash( transform ), v ); } /** Adds a new entry to the table. * * <p>Note that as long as the handle of the given node is not in the * table this function will always perform correctly. Otherwise, the result is unpredictable. * * @param s the signature of the handle of <code>v</code>. * @param v a node. */ public void addNew( long s, final InternalNode v ) { if ( SHORT_SIGNATURES ) s &= 0x3; if ( DEBUG ) System.err.println( "Map.addNew(" + s + ", " + v + ")" ); int pos = hash( s ); // Finds a free position, marking all keys with the same signature along the search path as duplicates. while( node[ pos ] != null ) { if ( signature[ pos ] == s ) dup[ pos ] = true; pos = ( pos + 1 ) & mask; } if ( ASSERTS ) assert node[ pos ] == null; size++; signature[ pos ] = s; node[ pos ] = v; if ( 3L * size > 2L * length ) { // Rehash. length *= 2; mask = length - 1; final long newKey[] = new long[ length ]; final InternalNode[] newValue = new InternalNode[ length ]; final boolean[] newDup = new boolean[ length ]; final long[] key = this.signature; final InternalNode[] value = this.node; for( int i = key.length; i-- != 0; ) { if ( value[ i ] != null ) { s = key[ i ]; pos = hash( s ); while( newValue[ pos ] != null ) { if ( newKey[ pos ] == s ) newDup[ pos ] = true; pos = ( pos + 1 ) & mask; } newKey[ pos ] = key[ i ]; newValue[ pos ] = value[ i ]; } } this.signature = newKey; this.node = newValue; this.dup = newDup; } if ( ASSERTS ) assertTable(); } public int size() { return size; } /** Retrives a node given its handle. * * @param v a bit vector. * @param handleLength the prefix of <code>v</code> that must be used as a handle. * @param s the signature of the specified handle. 
* @param exact whether the search should be exact; if false, and the given handle does not * appear in the table, it is possible that a wrong node is returned. * @return the node with given handle, or <code>null</code> if there is no such node. */ public InternalNode get( final BitVector v, final long handleLength, final long s, final boolean exact ) { if ( SHORT_SIGNATURES ) { final int pos = exact ? findExactPos( s & 0x3, v, handleLength ) : findPos( s & 0x3, v, handleLength ); return node[ pos ]; } else { final int pos = exact ? findExactPos( s, v, handleLength ) : findPos( s, v, handleLength ); return node[ pos ]; } } /** Retrives a node given its handle. * * @param handle a handle. * @param exact whether the search should be exact; if false, and the given handle does not * appear in the table, it is possible that a wrong node is returned. * @return the node with given handle, or <code>null</code> if there is no such node. * @see #get(BitVector, long, long, boolean) */ public InternalNode get( final BitVector handle, final boolean exact ) { return get( handle, handle.length(), Hashes.murmur( handle, 0 ), exact ); } public String toString() { StringBuilder s = new StringBuilder(); s.append( '{' ); for( LongArrayBitVector v: keySet() ) s.append( v ).append( " => " ).append( get( v, false ) ).append( ", " ); if ( s.length() > 1 ) s.setLength( s.length() - 2 ); s.append( '}' ); return s.toString(); } } /** A node of the trie. */ protected abstract static class Node { /** The length of the extent of the parent node, or 0 for the root. */ protected long parentExtentLength; /** The length of the extent (for leaves, this is equal to the length of the transformed {@link #key}). */ protected long extentLength; public boolean isLeaf() { return this instanceof Leaf; } public boolean isInternal() { return this instanceof InternalNode; } public long nameLength() { return parentExtentLength == 0 ? 0 : parentExtentLength + 1; } public long jumpLength() { final long handleLength = handleLength(); return handleLength + ( handleLength & -handleLength ); } public long handleLength() { return twoFattest( parentExtentLength, extentLength ); } public abstract BitVector key( TransformationStrategy<Object> transform ); public abstract BitVector handle( TransformationStrategy<Object> transform ); public abstract BitVector extent( TransformationStrategy<Object> transform ); public abstract boolean intercepts( final long h ); public long handleHash( TransformationStrategy<Object> transform ) { if ( SHORT_SIGNATURES ) return Hashes.murmur( handle( transform ), 0 ) & 0x3; else return Hashes.murmur( handle( transform ), 0 ); } /** Returns true if this node is the exit node of a string. * * @param v the string. * @param transform the transformation strategy used to build the trie this node belongs to. * @return true if the string exits at this node. */ public boolean isExitNodeOf( final LongArrayBitVector v, TransformationStrategy<Object> transform ) { return isExitNodeOf( v.length(), v.longestCommonPrefixLength( extent( transform ) ) ); } /** Returns true if this node is the exit node of a string given its length and the length of the longest * common prefix with the node extent. * * @param length the length of a string. * @param lcpLength the length of the longest common prefix between the string and the extent of this node. * @return true if the string exits at this node. 
*/ public boolean isExitNodeOf( final long length, final long lcpLength ) { return parentExtentLength < lcpLength && ( lcpLength < extentLength || lcpLength == length ); } public String toString() { final TransformationStrategy transform = TransformationStrategies.prefixFreeIso(); return ( isLeaf() ? "[" : "(" ) + Integer.toHexString( hashCode() & 0xFFFF ) + ( key( transform ) == null ? "" : " " + ( extentLength > 16 ? key( transform ).subVector( 0, 8 ) + "..." + key( transform ).subVector( extentLength - 8, extentLength ): key( transform ).subVector( 0, extentLength ) ) ) + " (" + parentExtentLength + ".." + extentLength + "], " + handleLength() + "->" + jumpLength() + ( isLeaf() ? "]" : ")" ); } } /** A node of the trie. */ protected final static class InternalNode extends Node { /** The left subtree. */ protected Node left; /** The right subtree. */ protected Node right; /** The jump pointer for the left path for internal nodes; <code>null</code>, otherwise (this * makes leaves distinguishable). */ protected Node jumpLeft; /** The jump pointer for the right path for internal nodes; <code>null</code>, otherwise. */ protected Node jumpRight; /** The leaf whose key this node refers to for internal nodes; the internal node that * refers to the key of this leaf, otherwise. Will be <code>null</code> for exactly one leaf. */ protected Leaf reference; public boolean isLeaf() { return false; } public boolean isInternal() { return true; } public boolean intercepts( final long h ) { return h > parentExtentLength && h <= extentLength; } public BitVector extent( TransformationStrategy<Object> transform ) { return reference.key( transform ).subVector( 0, extentLength ); } @Override public BitVector key( TransformationStrategy<Object> transform ) { return reference.key( transform ); } public BitVector handle( TransformationStrategy<Object> transform ) { return reference.key( transform ).subVector( 0, handleLength() ); } } /** A node of the trie. */ protected final static class Leaf extends Node { /** The previous leaf. */ protected Leaf prev; /** The next leaf. */ protected Leaf next; /** The key upon which the extent of node is based, for internal nodes; the * key associated to a leaf, otherwise. */ protected CharSequence key; /** The leaf whose key this node refers to for internal nodes; the internal node that * refers to the key of this leaf, otherwise. Will be <code>null</code> for exactly one leaf. */ protected InternalNode reference; public boolean isLeaf() { return true; } public boolean isInternal() { return false; } public boolean intercepts( final long h ) { return h > parentExtentLength; } public BitVector extent( final TransformationStrategy<Object> transform ) { return key( transform ); } @Override public BitVector key( final TransformationStrategy<Object> transform ) { return transform.toBitVector( key ); } public BitVector handle( TransformationStrategy<Object> transform ) { return key( transform ).subVector( 0, handleLength() ); } } /** Creates a new z-fast trie using the given transformation strategy. * * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ @SuppressWarnings("unchecked") public ZFastTrie( final TransformationStrategy<? 
super T> transform ) { this.transform = transform; this.map = new Map( (TransformationStrategy<Object>)transform ); initHeadTail(); } private void initHeadTail() { head = new Leaf(); tail = new Leaf(); head.next = tail; tail.prev = head; } /** Creates a new z-fast trie using the given elements and transformation strategy. * * @param elements an iterator returning the elements among which the trie must be able to rank. * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ public ZFastTrie( final Iterator<? extends T> elements, final TransformationStrategy<? super T> transform ) { this( transform ); while( elements.hasNext() ) add( elements.next() ); } /** Creates a new z-fast trie using the given elements and transformation strategy. * * @param elements an iterator returning the elements among which the trie must be able to rank. * @param transform a transformation strategy that must turn distinct elements into distinct, prefix-free bit vectors. */ public ZFastTrie( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform ) { this( elements.iterator(), transform ); } public int size() { return size > Integer.MAX_VALUE ? -1 : (int)size; } /** Returns the 2-fattest number in an interval. * * <p>Note that to get the length of the handle of a node you must * call this function passing the length of the extent of the parent (one less * than the node name) and the length of the extent of the node. * * @param l left extreme (excluded). * @param r right extreme (included). * @return the 2-fattest number in (<code>l</code>..<code>r</code>]. */ private final static long twoFattest( final long l, final long r ) { return ( -1L << Fast.mostSignificantBit( l ^ r ) & r ); } private static void removeLeaf( final Leaf node ) { node.next.prev = node.prev; node.prev.next = node.next; } private static void addAfter( final Leaf pred, final Leaf node ) { node.next = pred.next; node.prev = pred; pred.next.prev = node; pred.next = node; } private static void addBefore( final Leaf succ, final Leaf node ) { node.prev = succ.prev; node.next = succ; succ.prev.next = node; succ.prev = node; } private void assertTrie() { /* Shortest key */ LongArrayBitVector root = null; /* Keeps track of which nodes in map are reachable using left/right from the root. */ ObjectOpenHashSet<Node> nodes = new ObjectOpenHashSet<Node>(); /* Keeps track of leaves. */ ObjectOpenHashSet<Leaf> leaves = new ObjectOpenHashSet<Leaf>(); /* Keeps track of reference to leaf keys in internal nodes. */ ObjectOpenHashSet<Object> references = new ObjectOpenHashSet<Object>(); assert size == 0 && map.size() == 0 || size == map.size() + 1; /* Search for the root (shortest handle) and check that nodes and handles do match. */ for( LongArrayBitVector v : map.keySet() ) { final long vHandleLength = map.get( v, false ).handleLength(); if ( root == null || map.get( root, false ).handleLength() > vHandleLength ) root = v; final InternalNode node = map.get( v, false ); nodes.add( node ); assert node.reference.reference == node : node + " -> " + node.reference + " -> " + node.reference.reference; } assert nodes.size() == map.size() : nodes.size() + " != " + map.size(); assert size < 2 || this.root == map.get( root, false ); if ( size > 1 ) { /* Verify doubly linked list of leaves. 
*/ Leaf toRight = head.next, toLeft = tail.prev; for( int i = 1; i < size; i++ ) { assert new MutableString( toRight.key ).compareTo( toRight.next.key ) < 0 : toRight.key + " >= " + toRight.next.key + " " + toRight + " " + toRight.next; assert new MutableString( toLeft.key ).compareTo( toLeft.prev.key ) > 0 : toLeft.key + " >= " + toLeft.prev.key + " " + toLeft + " " + toLeft.prev; toRight = toRight.next; toLeft = toLeft.prev; } final int numNodes = visit( map.get( root, false ), null, 0, 0, nodes, leaves, references ); assert numNodes == 2 * size - 1 : numNodes + " != " + ( 2 * size - 1 ); assert leaves.size() == size; int c = 0; for( Leaf leaf: leaves ) if ( references.contains( leaf.key ) ) c++; assert c == size - 1 : c + " != " + ( size - 1 ); } else if ( size == 1 ) { assert head.next == this.root; assert tail.prev == this.root; } assert nodes.isEmpty(); } private int visit( final Node n, final Node parent, final long parentExtentLength, final int depth, ObjectOpenHashSet<Node> nodes, ObjectOpenHashSet<Leaf> leaves, ObjectOpenHashSet<Object> references ) { if ( n == null ) return 0; if ( DEBUG ) { for( int i = depth; i-- != 0; ) System.err.print( '\t' ); System.err.println( "Node " + n + " (parent extent length: " + parentExtentLength + ")" + ( n.isInternal() ? " Jump left: " + ((InternalNode)n).jumpLeft + " Jump right: " + ((InternalNode)n).jumpRight : "" ) ); } assert parent == null || parent.extent( (TransformationStrategy<Object>)transform ).equals( n.extent( (TransformationStrategy<Object>)transform ).subVector( 0, parent.extentLength ) ); assert parentExtentLength < n.extentLength; assert n.parentExtentLength == parentExtentLength : n.parentExtentLength + " != " + parentExtentLength + " " + n; if ( n.isInternal() ) { assert references.add( ((InternalNode)n).reference.key ); assert nodes.remove( n ) : n; assert map.keySet().contains( n.handle( (TransformationStrategy<Object>)transform ) ) : n; /* Check that jumps are correct. */ final long jumpLength = n.jumpLength(); Node jumpLeft = ((InternalNode)n).left; while( jumpLeft.isInternal() && jumpLength > jumpLeft.extentLength ) jumpLeft = ((InternalNode)jumpLeft).left; assert jumpLeft == ((InternalNode)n).jumpLeft : jumpLeft + " != " + ((InternalNode)n).jumpLeft + " (node: " + n + ")"; Node jumpRight = ((InternalNode)n).right; while( jumpRight.isInternal() && jumpLength > jumpRight.extentLength ) jumpRight = ((InternalNode)jumpRight).right; assert jumpRight == ((InternalNode)n).jumpRight : jumpRight + " != " + ((InternalNode)n).jumpRight + " (node: " + n + ")"; return 1 + visit( ((InternalNode)n).left, n, n.extentLength, depth + 1, nodes, leaves, references ) + visit( ((InternalNode)n).right, n, n.extentLength, depth + 1, nodes, leaves, references ); } else { assert leaves.add( (Leaf)n ); assert n.extentLength == n.key( (TransformationStrategy<Object>)transform ).length(); return 1; } } /** Sets the jump pointers of a node by searching exhaustively for * handles that are jumps of the node handle length. * * @param node the node whose jump pointers must be set. */ private static void setJumps( final InternalNode node ) { if ( DEBUG ) System.err.println( "setJumps(" + node + ")" ); final long jumpLength = node.jumpLength(); Node jump; for( jump = node.left; jump.isInternal() && jumpLength > jump.extentLength; ) jump = ((InternalNode)jump).jumpLeft; if ( ASSERTS ) assert jump.intercepts( jumpLength ) : jumpLength + " not in " + "(" + jump.parentExtentLength + ".." 
+ jump.extentLength + "] " + jump; node.jumpLeft = jump; for( jump = node.right; jump.isInternal() && jumpLength > jump.extentLength; ) jump = ((InternalNode)jump).jumpRight; if ( ASSERTS ) assert jump.intercepts( jumpLength ) : jumpLength + " not in " + "(" + jump.parentExtentLength + ".." + jump.extentLength + "] " + jump; node.jumpRight = jump; } /** Fixes the right jumps of the ancestors of a node after an insertion. * * @param exitNode the exit node. * @param rightChild * @param leaf the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. */ private static void fixRightJumpsAfterInsertion( final InternalNode internal, Node exitNode, boolean rightChild, Leaf leaf, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixRightJumpsAfterInsertion(" + internal + ", " + exitNode + ", " + rightChild + ", " + leaf + ", " + stack ); final long lcp = leaf.parentExtentLength; InternalNode toBeFixed = null; long jumpLength = -1; if ( ! rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != exitNode ) break; if ( jumpLength <= lcp ) toBeFixed.jumpLeft = internal; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != exitNode || jumpLength > lcp ) break; toBeFixed.jumpRight = internal; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); while( exitNode.isInternal() && toBeFixed.jumpRight != exitNode ) exitNode = ((InternalNode)exitNode).jumpRight; if ( toBeFixed.jumpRight != exitNode ) return; toBeFixed.jumpRight = leaf; } } } /** Fixes the left jumps of the ancestors of a node after an insertion. * * @param exitNode the exit node. * @param rightChild * @param leaf the new leaf. * @param stack a stack containing the fat ancestors of <code>exitNode</code>. */ private static void fixLeftJumpsAfterInsertion( final InternalNode internal, Node exitNode, boolean rightChild, Leaf leaf, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixLeftJumpsAfterInsertion(" + internal + ", " + exitNode + ", " + rightChild + ", " + leaf + ", " + stack ); final long lcp = leaf.parentExtentLength; InternalNode toBeFixed = null; long jumpLength = -1; if ( rightChild ) { /* Nodes jumping to the right into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != exitNode ) break; if ( jumpLength <= lcp ) toBeFixed.jumpRight = internal; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != exitNode || jumpLength > lcp ) break; toBeFixed.jumpLeft = internal; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); while( exitNode.isInternal() && toBeFixed.jumpLeft != exitNode ) exitNode = ((InternalNode)exitNode).jumpLeft; if ( toBeFixed.jumpLeft != exitNode ) return; toBeFixed.jumpLeft = leaf; } } } /** Fixes the right jumps of the ancestors of a node after a deletion. * * @param parentExitNode the exit node. * @param rightChild * @param exitNode the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. 
*/ private static void fixRightJumpsAfterDeletion( Node otherNode, InternalNode parentExitNode, boolean rightChild, Leaf exitNode, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixRightJumpsAfterDeletion(" + otherNode + ", " + parentExitNode + ", " + rightChild + ", " + exitNode + ", " + stack ); InternalNode toBeFixed = null; long jumpLength = -1; if ( ! rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != parentExitNode ) break; toBeFixed.jumpLeft = otherNode; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != parentExitNode ) break; toBeFixed.jumpRight = otherNode; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); if ( toBeFixed.jumpRight != exitNode ) break; jumpLength = toBeFixed.jumpLength(); while( ! otherNode.intercepts( jumpLength ) ) otherNode = ((InternalNode)otherNode).jumpRight; toBeFixed.jumpRight = otherNode; } } } /** Fixes the left jumps of the ancestors of a node after a deletion. * * @param parentExitNode the exit node. * @param rightChild * @param exitNode the new leaf. * @param stack a stack containing the 2-fat ancestors of <code>exitNode</code>. */ private static void fixLeftJumpsAfterDeletion( Node otherNode, InternalNode parentExitNode, boolean rightChild, Leaf exitNode, final ObjectArrayList<InternalNode> stack ) { if ( DEBUG ) System.err.println( "fixLeftJumpsAfterDeletion(" + otherNode + ", " + parentExitNode + ", " + rightChild + ", " + exitNode + ", " + stack ); InternalNode toBeFixed = null; long jumpLength = -1; if ( rightChild ) { /* Nodes jumping to the left into the exit node but above the lcp must point to internal. */ while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpRight != parentExitNode ) break; toBeFixed.jumpRight = otherNode; } } else { while( ! stack.isEmpty() ) { toBeFixed = stack.top(); jumpLength = toBeFixed.jumpLength(); if ( toBeFixed.jumpLeft != parentExitNode ) break; toBeFixed.jumpLeft = otherNode; stack.pop(); } while( ! stack.isEmpty() ) { toBeFixed = stack.pop(); if ( toBeFixed.jumpLeft != exitNode ) break; jumpLength = toBeFixed.jumpLength(); while( ! otherNode.intercepts( jumpLength ) ) otherNode = ((InternalNode)otherNode).jumpLeft; toBeFixed.jumpLeft = otherNode; } } } @SuppressWarnings("unchecked") public boolean remove( final Object k ) { final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)k ) ); if ( DEBUG ) System.err.println( "remove(" + v + ")" ); if ( DEBUG ) System.err.println( "Map: " + map + " root: " + root ); if ( size == 0 ) return false; if ( size == 1 ) { if ( ! ((Leaf)root).key.equals( k ) ) return false; root = null; size = 0; if ( ASSERTS ) assertTrie(); return true; } final ObjectArrayList<InternalNode> stack = new ObjectArrayList<InternalNode>( 64 ); InternalNode parentExitNode; boolean rightLeaf, rightChild = false; Node exitNode; long lcp; final long[] state = Hashes.preprocessMurmur( v, 0 ); parentExitNode = getParentExitNode( v, state, stack ); rightLeaf = parentExitNode != null && parentExitNode.extentLength < v.length() && v.getBoolean( parentExitNode.extentLength ); exitNode = parentExitNode == null ? root : ( rightLeaf ? 
parentExitNode.right : parentExitNode.left ); lcp = exitNode.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ); if ( DDEBUG ) System.err.println( "Exit node " + exitNode ); if ( ! ( exitNode.isLeaf() && ((Leaf)exitNode).key.equals( k ) ) ) return false; // Not found final Node otherNode = rightLeaf ? parentExitNode.left : parentExitNode.right; final boolean otherNodeIsInternal = otherNode.isInternal(); if ( parentExitNode != null && parentExitNode != root ) { // Let us fix grandpa's child pointer. InternalNode grandParentExitNode = getGrandParentExitNode( v, state, stack ); if ( rightChild = ( grandParentExitNode.right == parentExitNode ) ) grandParentExitNode.right = otherNode; else grandParentExitNode.left = otherNode; } final long parentExitNodehandleLength = parentExitNode.handleLength(); final long otherNodeHandleLength = otherNode.handleLength(); final long t = parentExitNodehandleLength | otherNodeHandleLength; final boolean cutLow = ( t & -t & otherNodeHandleLength ) != 0; if ( DEBUG ) System.err.println( "lcp: " + lcp ); if ( parentExitNode == root ) root = otherNode; // Fix leaf reference if not null final InternalNode refersToExitNode = ((Leaf)exitNode).reference; if ( refersToExitNode == null ) parentExitNode.reference.reference = null; else { refersToExitNode.reference = parentExitNode.reference; refersToExitNode.reference.reference = refersToExitNode; } // Fix doubly-linked list removeLeaf( (Leaf)exitNode ); if ( DDEBUG ) System.err.println( "Cut " + ( cutLow ? "low" : "high") + "; leaf on the " + ( rightLeaf ? "right" : "left") + "; other node is " + ( otherNodeIsInternal ? "internal" : "a leaf") ); if ( rightLeaf ) fixRightJumpsAfterDeletion( otherNode, parentExitNode, rightChild, (Leaf)exitNode, stack ); else fixLeftJumpsAfterDeletion( otherNode, parentExitNode, rightChild, (Leaf)exitNode, stack ); if ( cutLow && otherNodeIsInternal ) { map.remove( Hashes.murmur( otherNode.key( (TransformationStrategy<Object>)transform ), otherNodeHandleLength, state, parentExitNode.extentLength ), (InternalNode)otherNode, otherNodeHandleLength ); otherNode.parentExtentLength = parentExitNode.parentExtentLength; map.replace( Hashes.murmur( v, parentExitNodehandleLength, state ), (InternalNode)otherNode ); setJumps( (InternalNode)otherNode ); } else { otherNode.parentExtentLength = parentExitNode.parentExtentLength; map.remove( Hashes.murmur( v, parentExitNodehandleLength, state ), parentExitNode, parentExitNodehandleLength ); } size--; if ( ASSERTS ) { assertTrie(); assert ! 
contains( k ); } return true; } @Override public boolean add( final T k ) { if ( DEBUG ) System.err.println( "add(" + k + ")" ); final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( k ) ); if ( DEBUG ) System.err.println( "add(" + v + ")" ); if ( DEBUG ) System.err.println( "Map: " + map + " root: " + root ); if ( size == 0 ) { root = new Leaf(); ((Leaf)root).key = (CharSequence)k; root.parentExtentLength = 0; root.extentLength = v.length(); ((Leaf)root).reference = null; addAfter( head, (Leaf)root ); size++; if ( ASSERTS ) assertTrie(); return true; } final ObjectArrayList<InternalNode> stack = new ObjectArrayList<InternalNode>( 64 ); InternalNode parentExitNode; boolean rightChild; Node exitNode; long lcp; final long[] state = Hashes.preprocessMurmur( v, 0 ); parentExitNode = getParentExitNode( v, state, stack ); rightChild = parentExitNode != null && parentExitNode.extentLength < v.length() && v.getBoolean( parentExitNode.extentLength ); exitNode = parentExitNode == null ? root : ( rightChild ? parentExitNode.right : parentExitNode.left ); lcp = exitNode.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ); if ( DDEBUG ) System.err.println( "Exit node " + exitNode ); if ( exitNode.isLeaf() && ((Leaf)exitNode).key.equals( k ) ) return false; // Already there final boolean exitDirection = v.getBoolean( lcp ); final long exitNodeHandleLength = exitNode.handleLength(); final boolean cutLow = lcp >= exitNodeHandleLength; if ( DEBUG ) System.err.println( "lcp: " + lcp ); Leaf leaf = new Leaf(); InternalNode internal = new InternalNode(); final boolean exitNodeIsInternal = exitNode.isInternal(); leaf.key = (CharSequence)k; leaf.parentExtentLength = lcp; leaf.extentLength = v.length(); leaf.reference = internal; internal.reference = leaf; internal.parentExtentLength = exitNode.parentExtentLength; internal.extentLength = lcp; if ( exitDirection ) { internal.jumpRight = internal.right = leaf; internal.left = exitNode; internal.jumpLeft = cutLow && exitNodeIsInternal ? ((InternalNode)exitNode).jumpLeft : exitNode; } else { internal.jumpLeft = internal.left = leaf; internal.right = exitNode; internal.jumpRight = cutLow && exitNodeIsInternal ? ((InternalNode)exitNode).jumpRight : exitNode; } if ( exitNode == root ) root = internal; // Update root else { if ( rightChild ) parentExitNode.right = internal; else parentExitNode.left = internal; } if ( DDEBUG ) System.err.println( "Cut " + ( cutLow ? "low" : "high") + "; exit to the " + ( exitDirection ? "right" : "left") ); if ( exitDirection ) fixRightJumpsAfterInsertion( internal, exitNode, rightChild, leaf, stack ); else fixLeftJumpsAfterInsertion( internal, exitNode, rightChild, leaf, stack ); if ( cutLow && exitNodeIsInternal ) { map.replace( Hashes.murmur( v, exitNodeHandleLength, state ), internal ); exitNode.parentExtentLength = lcp; map.addNew( Hashes.murmur( exitNode.key( (TransformationStrategy<Object>)transform ), exitNode.handleLength(), state, lcp ), (InternalNode)exitNode ); setJumps( (InternalNode)exitNode ); } else { exitNode.parentExtentLength = lcp; map.addNew( Hashes.murmur( v, internal.handleLength(), state ), internal ); } if ( DEBUG ) System.err.println( "After insertion, map: " + map + " root: " + root ); size++; /* We find a predecessor or successor to insert the new leaf in the doubly linked list. 
*/ if ( exitDirection ) { while( exitNode.isInternal() ) exitNode = ((InternalNode)exitNode).jumpRight; addAfter( (Leaf)exitNode, leaf ); } else { while( exitNode.isInternal() ) exitNode = ((InternalNode)exitNode).jumpLeft; addBefore( (Leaf)exitNode, leaf ); } if ( ASSERTS ) assertTrie(); if ( ASSERTS ) assert contains( k ); return true; } /** Returns the exit node of a given bit vector. * * @param v a bit vector. * @return the exit node of <code>v</code>. */ private Node getExitNode( final LongArrayBitVector v, final long[] state ) { if ( size == 0 ) throw new IllegalStateException(); if ( size == 1 ) return root; if ( DDEBUG ) System.err.println( "getExitNode(" + v + ")" ); final long length = v.length(); // This can be the exit node of v, the parex node of v, or something completely wrong. InternalNode parexOrExitNode = fatBinarySearch( v, state, null, false, 0, length ); // This will contain the exit node if parexOrExitNode contains the correct parex. Node candidateExitNode; if ( parexOrExitNode == null ) candidateExitNode = root; else candidateExitNode = parexOrExitNode.extentLength < length && v.getBoolean( parexOrExitNode.extentLength ) ? parexOrExitNode.right : parexOrExitNode.left; /* This lcp length makes it possible to compute the length of the lcp between v and * parexOrExitNode by minimisation with the extent length, as necessarily the extent of * candidateExitNode is an extension of the extent of parexOrExitNode. */ long lcpLength = v.longestCommonPrefixLength( candidateExitNode.extent( (TransformationStrategy<Object>)transform ) ); if ( candidateExitNode.isExitNodeOf( length, lcpLength ) ) return candidateExitNode; if ( parexOrExitNode.isExitNodeOf( length, Math.min( parexOrExitNode.extentLength, lcpLength ) ) ) return parexOrExitNode; parexOrExitNode = fatBinarySearch( v, state, null, true, 0, length ); if ( parexOrExitNode == null ) return root; else candidateExitNode = parexOrExitNode.extentLength < length && v.getBoolean( parexOrExitNode.extentLength ) ? parexOrExitNode.right : parexOrExitNode.left; lcpLength = v.longestCommonPrefixLength( candidateExitNode.extent( (TransformationStrategy<Object>)transform ) ); if ( candidateExitNode.isExitNodeOf( length, lcpLength ) ) return candidateExitNode; return parexOrExitNode; } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @param exact if true, the map defining the trie will be accessed in exact mode. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root; * if <code>exact</code> is false, with low probability * the result might be wrong. 
*/ public InternalNode getParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack, final boolean exact ) { if ( size == 0 || stack == null ) throw new IllegalStateException(); if ( size == 1 ) return null; if ( DDEBUG ) System.err.println( "getParentExitNode(" + v + ", " + exact + ")" ); final InternalNode node = fatBinarySearch( v, state, stack, exact, 0, v.length() ); if ( node == null ) return null; if ( node.extent( (TransformationStrategy<Object>)transform ).isProperPrefix( v ) ) return node; stack.pop(); if ( stack.isEmpty() ) return fatBinarySearch( v, state, stack, exact, 0, node.parentExtentLength ); if ( stack.top().extentLength == node.parentExtentLength ) return stack.top(); return fatBinarySearch( v, state, stack, exact, stack.top().extentLength, node.parentExtentLength ); } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root. */ public InternalNode getParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack ) { if ( size == 1 ) return null; final InternalNode candidate = getParentExitNode( v, state, stack, false ); if ( candidate == null ) { if ( root.isExitNodeOf( v, (TransformationStrategy<Object>)transform ) ) return null; } else if ( candidate.extent( (TransformationStrategy<Object>)transform ).isProperPrefix( v ) ) { final Node exitNode = v.getBoolean( candidate.extentLength ) ? candidate.right : candidate.left; if ( exitNode.isExitNodeOf( v, (TransformationStrategy<Object>)transform ) ) return candidate; } stack.clear(); return getParentExitNode( v, state, stack, true ); } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @param exact if true, the map defining the trie will be accessed in exact mode. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root; * if <code>exact</code> is false, with low probability * the result might be wrong. */ public InternalNode getGrandParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack, final boolean exact ) { final InternalNode parentExitNode = stack.pop(); if ( stack.isEmpty() ) return fatBinarySearch( v, state, stack, exact, 0, parentExitNode.parentExtentLength ); if ( stack.top().extentLength == parentExitNode.parentExtentLength ) return stack.top(); return fatBinarySearch( v, state, stack, exact, stack.top().extentLength, parentExitNode.parentExtentLength ); } /** Returns the parent of the exit node of a given bit vector. * * @param v a bit vector. * @param stack if not <code>null</code>, a stack that will be filled with the <em>fat nodes</em> along the path to the parent of the exit node. * @return the parent of the exit node of <code>v</code>, or <code>null</code> if the exit node is the root; * if <code>exact</code> is false, with low probability * the result might be wrong. 
*/ public InternalNode getGrandParentExitNode( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack ) { // TODO: make it work with non-exact search return getGrandParentExitNode( v, state, stack, true ); /*stack.clear(); InternalNode parentExitNode = getParentExitNode( v, state, null ); return getParentExitNode( v.copy( 0, parentExitNode.extentLength ), state, stack );*/ } private InternalNode fatBinarySearch( final LongArrayBitVector v, final long[] state, final ObjectArrayList<InternalNode> stack, final boolean exact, long a, long b ) { InternalNode node = null, top = stack == null || stack.isEmpty() ? null : stack.top(); //System.err.println( "Fat binary " + v + " " + stack + " (" + l + ".." + r + ") " + exact ); final int logLength = Fast.mostSignificantBit( b ); while( b - a > 0 ) { if ( ASSERTS ) assert logLength > -1; if ( DDEBUG ) System.err.println( "(" + a + ".." + b + "]" ); final long f = twoFattest( a, b ); if ( DDEBUG ) System.err.println( "Inquiring with key " + v.subVector( 0, f ) + " (" + f + ")" ); node = map.get( v, f, Hashes.murmur( v, f, state ), exact ); final long g; // Note that this test is just to catch false positives if ( node == null || ( g = node.extentLength ) < f ) { if ( DDEBUG ) System.err.println( "Missing" ); b = f - 1; } else { if ( DDEBUG ) System.err.println( "Found extent of length " + g ); if ( stack != null ) stack.push( node ); top = node; a = g; } } /* if ( parent == null || secondRound || equals( parent.reference.key, v, lastHandleLength, parent.extentLength ) ) break; secondRound = true; //System.err.println( "** " + v ); if ( stack != null ) stack.pop(); r = parent.parentExtentLength + 1; if ( last == null ) { l = 0; parent = null; } else { parent = last; l = parent.extentLength; if ( l == r ) break; } last = null; //System.err.println( "Restarting with (" + l + ".." + r + ")" ); //System.err.println( map ); }*/ if ( DDEBUG ) System.err.println( "Final length " + a + " node: " + top ); if ( false && ASSERTS ) { boolean rightChild; Node exitNode; long lcp; rightChild = top != null && top.extentLength < v.length() && v.getBoolean( top.extentLength ); exitNode = top == null ? root : ( rightChild ? top.right : top.left ); lcp = exitNode.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ); if ( exitNode.intercepts( lcp ) ) { // We can do asserts only if the result is correct /* If parent is null, the extent of the root must not be a prefix of v. */ if ( top == null ) assert root.key( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ) < root.extentLength; else { assert top.extentLength == a; /* If parent is not null, the extent of the parent must be a prefix of v, * and the extent of the exit node must be either v, or not a prefix of v. */ assert ! exact || top.extent( (TransformationStrategy<Object>)transform ).longestCommonPrefixLength( v ) == top.extentLength; if ( stack != null ) { /** We check that the stack contains exactly all handles that are backjumps * of the length of the extent of the parent. */ a = top.extentLength; while( a != 0 ) { final Node t = map.get( top.key( (TransformationStrategy<Object>)transform ).subVector( 0, a ), true ); if ( t != null ) assert stack.contains( t ); a ^= ( a & -a ); } /** We check that the stack contains the nodes you would obtain by searching from * the top for nodes to fix. 
*/ long left = 0; for( int i = 0; i < stack.size(); i++ ) { assert stack.get( i ).handleLength() == twoFattest( left, top.extentLength ) : stack.get( i ).handleLength() + " != " + twoFattest( left, top.extentLength ) + " " + i + " " + stack ; left = stack.get( i ).extentLength; } } } } } return top; } @SuppressWarnings("unchecked") public boolean contains( final Object o ) { if ( DEBUG ) System.err.println( "contains(" + o + ")" ); if ( size == 0 ) return false; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); final Node exitNode = getExitNode( v, state ); return exitNode.isLeaf() && ((Leaf)exitNode).key.equals( o ); } @SuppressWarnings("unchecked") public CharSequence pred( final Object o ) { if ( size == 0 ) return null; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); Node exitNode = getExitNode( v, state ); if ( v.compareTo( exitNode.extent( (TransformationStrategy<Object>)transform ) ) <= 0 ) { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpRight != null ) exitNode = ((InternalNode)exitNode).jumpRight; return ((Leaf)exitNode).key; } else { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpLeft != null ) exitNode = ((InternalNode)exitNode).jumpLeft; return ((Leaf)exitNode).prev.key; } } @SuppressWarnings("unchecked") public CharSequence succ( final Object o ) { if ( size == 0 ) return null; final LongArrayBitVector v = LongArrayBitVector.copy( transform.toBitVector( (T)o ) ); final long[] state = Hashes.preprocessMurmur( v, 0 ); Node exitNode = getExitNode( v, state ); if ( v.compareTo( exitNode.extent( (TransformationStrategy<Object>)transform ) ) <= 0 ) { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpLeft != null ) exitNode = ((InternalNode)exitNode).jumpLeft; return ((Leaf)exitNode).key; } else { while( exitNode.isInternal() && ((InternalNode)exitNode).jumpRight != null ) exitNode = ((InternalNode)exitNode).jumpRight; return ((Leaf)exitNode).next.key; } } private void writeObject( final ObjectOutputStream s ) throws IOException { s.defaultWriteObject(); if ( size > 0 ) writeNode( root, (TransformationStrategy<Object>)transform, s ); } private static void writeNode( final Node node, final TransformationStrategy<Object> transform, final ObjectOutputStream s ) throws IOException { s.writeBoolean( node.isInternal() ); s.writeLong( node.extentLength - node.parentExtentLength ); if ( node.isInternal() ) { writeNode( ((InternalNode)node).left, transform, s ); writeNode( ((InternalNode)node).right, transform, s ); } else s.writeObject( ((Leaf)node).key ); } private void readObject( final ObjectInputStream s ) throws IOException, ClassNotFoundException { s.defaultReadObject(); initHeadTail(); map = new Map( size, (TransformationStrategy<Object>)transform ); if ( size > 0 ) root = readNode( s, 0, 0, map, new ObjectArrayList<Leaf>(), new ObjectArrayList<InternalNode>(), new IntArrayList(), new IntArrayList(), new BooleanArrayList() ); if ( ASSERTS ) assertTrie(); } /** Reads recursively a node of the trie. * * @param s the object input stream. * @param depth the depth of the node to be read. * @param parentExtentLength the length of the extent of the parent node. * @param map the map representing the trie. * @param leafStack a stack that cumulates leaves as they are found: internal nodes extract references from this stack when their visit is completed. 
* @param jumpStack a stack that cumulates nodes that need jump pointer fixes. * @param depthStack a stack parallel to <code>jumpStack</code>, providing the depth of the corresponding node. * @param segmentStack a stack of integers representing the length of maximal constant subsequences of the string of directions taken up to the current node; for instance, if we reached the current node by 1/1/0/0/0/1/0/0, the stack will contain 2,3,1,2. * @param dirStack a stack parallel to <code>segmentStack</code>: for each element, whether it counts left or right turns. * @return the subtree rooted at the next node in the stream. */ private Node readNode( final ObjectInputStream s, final int depth, final long parentExtentLength, final Map map, final ObjectArrayList<Leaf> leafStack, final ObjectArrayList<InternalNode> jumpStack, final IntArrayList depthStack, final IntArrayList segmentStack, final BooleanArrayList dirStack ) throws IOException, ClassNotFoundException { final boolean isInternal = s.readBoolean(); final long pathLength = s.readLong(); final Node node = isInternal ? new InternalNode() : new Leaf(); node.parentExtentLength = parentExtentLength; node.extentLength = parentExtentLength + pathLength; if ( ! dirStack.isEmpty() ) { /* We cannot fix the jumps of nodes that are more than this number of levels up in the tree. */ final int maxDepthDelta = segmentStack.topInt(); final boolean dir = dirStack.topBoolean(); InternalNode anc; int d; long jumpLength; do { jumpLength = ( anc = jumpStack.top() ).jumpLength(); d = depthStack.topInt(); /* To be fixable, a node must be within the depth limit, and we must intercept its jump length (note that * we cannot use .intercept() as the state of node is not yet consistent). If a node cannot be fixed, no * node higher in the stack can. */ if ( depth - d <= maxDepthDelta && jumpLength > parentExtentLength && ( ! isInternal || jumpLength <= node.extentLength ) ) { //if ( DDEBUG ) System.err.println( "Setting " + ( dir ? "right" : "left" ) + " jump pointer of " + anc + " to " + node ); if ( dir ) anc.jumpRight = node; else anc.jumpLeft = node; jumpStack.pop(); depthStack.popInt(); } else break; } while( ! jumpStack.isEmpty() ); } if ( isInternal ) { if ( dirStack.isEmpty() || dirStack.topBoolean() != false ) { segmentStack.push( 1 ); dirStack.push( false ); } else segmentStack.push( segmentStack.popInt() + 1 ); jumpStack.push( (InternalNode)node ); depthStack.push( depth ); if ( DEBUG ) System.err.println( "Recursing into left node... " ); ((InternalNode)node).left = readNode( s, depth + 1, node.extentLength, map, leafStack, jumpStack, depthStack, segmentStack, dirStack ); int top = segmentStack.popInt(); if ( top != 1 ) segmentStack.push( top - 1 ); else dirStack.popBoolean(); if ( dirStack.isEmpty() || dirStack.topBoolean() != true ) { segmentStack.push( 1 ); dirStack.push( true ); } else segmentStack.push( segmentStack.popInt() + 1 ); jumpStack.push( (InternalNode)node ); depthStack.push( depth ); if ( DEBUG ) System.err.println( "Recursing into right node... " ); ((InternalNode)node).right = readNode( s, depth + 1, node.extentLength, map, leafStack, jumpStack, depthStack, segmentStack, dirStack ); top = segmentStack.popInt(); if ( top != 1 ) segmentStack.push( top - 1 ); else dirStack.popBoolean(); /* We assign the reference leaf, and store the associated key. 
*/ final Leaf referenceLeaf = leafStack.pop(); ((InternalNode)node).reference = referenceLeaf; referenceLeaf.reference = (InternalNode)node; map.addNew( (InternalNode)node ); if ( ASSERTS ) { // Check jump pointers. Node t; t = ((InternalNode)node).left; while( t.isInternal() && ! t.intercepts( node.jumpLength() ) ) t = ((InternalNode)t).left; assert ((InternalNode)node).jumpLeft == t : ((InternalNode)node).jumpLeft + " != " + t + " (" + node + ")"; t = ((InternalNode)node).right; while( t.isInternal() && ! t.intercepts( node.jumpLength() ) ) t = ((InternalNode)t).right; assert ((InternalNode)node).jumpRight == t : ((InternalNode)node).jumpRight + " != " + t + " (" + node + ")"; } } else { ((Leaf)node).key = (CharSequence)s.readObject(); leafStack.push( (Leaf)node ); addBefore( tail, (Leaf)node ); } return node; } public static void main( final String[] arg ) throws NoSuchMethodException, IOException, JSAPException { final SimpleJSAP jsap = new SimpleJSAP( ZFastTrie.class.getName(), "Builds an PaCo trie-based monotone minimal perfect hash function reading a newline-separated list of strings.", new Parameter[] { new FlaggedOption( "encoding", ForNameStringParser.getParser( Charset.class ), "UTF-8", JSAP.NOT_REQUIRED, 'e', "encoding", "The string file encoding." ), new Switch( "iso", 'i', "iso", "Use ISO-8859-1 coding internally (i.e., just use the lower eight bits of each character)." ), new Switch( "bitVector", 'b', "bit-vector", "Build a trie of bit vectors, rather than a trie of strings." ), new Switch( "zipped", 'z', "zipped", "The string list is compressed in gzip format." ), new UnflaggedOption( "trie", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, JSAP.NOT_GREEDY, "The filename for the serialised z-fast trie." ), new UnflaggedOption( "stringFile", JSAP.STRING_PARSER, "-", JSAP.NOT_REQUIRED, JSAP.NOT_GREEDY, "The name of a file containing a newline-separated list of strings, or - for standard input." ), }); JSAPResult jsapResult = jsap.parse( arg ); if ( jsap.messagePrinted() ) return; final String functionName = jsapResult.getString( "trie" ); final String stringFile = jsapResult.getString( "stringFile" ); final Charset encoding = (Charset)jsapResult.getObject( "encoding" ); final boolean zipped = jsapResult.getBoolean( "zipped" ); final boolean iso = jsapResult.getBoolean( "iso" ); final boolean bitVector = jsapResult.getBoolean( "bitVector" ); final InputStream inputStream = "-".equals( stringFile ) ? System.in : new FileInputStream( stringFile ); final LineIterator lineIterator = new LineIterator( new FastBufferedReader( new InputStreamReader( zipped ? new GZIPInputStream( inputStream ) : inputStream, encoding ) ) ); final TransformationStrategy<CharSequence> transformationStrategy = iso ? TransformationStrategies.prefixFreeIso() : TransformationStrategies.prefixFreeUtf16(); ProgressLogger pl = new ProgressLogger(); pl.itemsName = "keys"; pl.displayFreeMemory = true; pl.start( "Adding keys..." 
); if ( bitVector ) { ZFastTrie<LongArrayBitVector> zFastTrie = new ZFastTrie<LongArrayBitVector>( TransformationStrategies.identity() ); while( lineIterator.hasNext() ) { zFastTrie.add( LongArrayBitVector.copy( transformationStrategy.toBitVector( lineIterator.next().copy() ) ) ); pl.lightUpdate(); } pl.done(); BinIO.storeObject( zFastTrie, functionName ); } else { ZFastTrie<CharSequence> zFastTrie = new ZFastTrie<CharSequence>( transformationStrategy ); while( lineIterator.hasNext() ) { zFastTrie.add( lineIterator.next().copy() ); pl.lightUpdate(); } pl.done(); BinIO.storeObject( zFastTrie, functionName ); } LOGGER.info( "Completed." ); } @Override public ObjectBidirectionalIterator<T> iterator() { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> headSet( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public ObjectBidirectionalIterator<T> iterator( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> subSet( T arg0, T arg1 ) { // TODO Auto-generated method stub return null; } @Override public ObjectSortedSet<T> tailSet( T arg0 ) { // TODO Auto-generated method stub return null; } @Override public Comparator<? super T> comparator() { // TODO Auto-generated method stub return null; } @Override public T first() { // TODO Auto-generated method stub return null; } @Override public T last() { // TODO Auto-generated method stub return null; } }
Towards getParentExitNode()...
src/it/unimi/dsi/sux4j/util/ZFastTrie.java
Towards getParentExitNode()...
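The fatBinarySearch routine in the ZFastTrie sources above repeatedly probes the handle map at the 2-fattest number of the current interval, i.e. the value in (a..b] with the most trailing zeros. The helper below is a minimal, self-contained sketch of that computation; the class and method names are mine, the use of Long.numberOfLeadingZeros instead of the library's own bit utilities is my choice, and the bit trick shown is one standard way to compute the 2-fattest number, not necessarily the exact sux4j implementation.

// Sketch only: assumes the usual definition of the 2-fattest number of (a..b],
// i.e. the unique value in that interval with the largest number of trailing zeros.
public final class TwoFattestSketch {

    /** Returns the 2-fattest number in (a..b]; requires 0 <= a < b. */
    static long twoFattest(long a, long b) {
        // Clearing every bit of b below the most significant bit in which a and b
        // differ yields the element of (a..b] with the most trailing zeros.
        int msb = 63 - Long.numberOfLeadingZeros(a ^ b);
        return (-1L << msb) & b;
    }

    public static void main(String[] args) {
        System.out.println(twoFattest(5, 9));   // 8  (binary 1000)
        System.out.println(twoFattest(8, 11));  // 10 (binary 1010)
        System.out.println(twoFattest(0, 7));   // 4  (binary 100)
    }
}

Probing at f = twoFattest(a, b) at each step is what lets the search discard roughly half of the candidate handle lengths per map lookup, which is the property the loop over (a..b] above relies on.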
Java
apache-2.0
c0474052f933b58aa5f479455cf547d13d9ea254
0
jrbn/dynamite
package nl.vu.cs.querypie.reasoner.actions;

import nl.vu.cs.ajira.actions.Action;
import nl.vu.cs.ajira.actions.ActionConf;
import nl.vu.cs.ajira.actions.ActionContext;
import nl.vu.cs.ajira.actions.ActionOutput;
import nl.vu.cs.ajira.data.types.SimpleData;
import nl.vu.cs.ajira.data.types.TLong;
import nl.vu.cs.ajira.data.types.Tuple;
import nl.vu.cs.querypie.ReasoningContext;
import nl.vu.cs.querypie.reasoner.rules.Rule;
import nl.vu.cs.querypie.reasoner.support.Pattern;
import nl.vu.cs.querypie.reasoner.support.Term;

public class GenericRuleExecutor extends Action {

    public static final int RULE_ID = 0;

    private Rule rule;
    private int[][] pos_gen_head;
    private final SimpleData[] outputTriple = new SimpleData[3];

    @Override
    public void registerActionParameters(ActionConf conf) {
        conf.registerParameter(RULE_ID, "rule", null, true);
    }

    @Override
    public void startProcess(ActionContext context) throws Exception {
        rule = ReasoningContext.getInstance().getRule(getParamInt(RULE_ID));
        pos_gen_head = rule.getSharedVariablesGen_Head();
        Pattern head = rule.getHead();
        for (int i = 0; i < 3; i++) {
            Term t = head.getTerm(i);
            if (t.getName() == null) {
                outputTriple[i] = new TLong(t.getValue());
            }
        }
    }

    @Override
    public void process(Tuple tuple, ActionContext context, ActionOutput actionOutput) throws Exception {
        // Copy the "key" in the output triple
        for (int i = 0; i < pos_gen_head.length; ++i) {
            outputTriple[pos_gen_head[i][1]] = tuple.get(i);
        }
        actionOutput.output(outputTriple);
    }
}
src/nl/vu/cs/querypie/reasoner/actions/GenericRuleExecutor.java
package nl.vu.cs.querypie.reasoner.actions;

import nl.vu.cs.ajira.actions.Action;
import nl.vu.cs.ajira.actions.ActionConf;
import nl.vu.cs.ajira.actions.ActionContext;
import nl.vu.cs.ajira.actions.ActionOutput;
import nl.vu.cs.ajira.data.types.SimpleData;
import nl.vu.cs.ajira.data.types.TLong;
import nl.vu.cs.ajira.data.types.Tuple;
import nl.vu.cs.querypie.ReasoningContext;
import nl.vu.cs.querypie.reasoner.rules.Rule;
import nl.vu.cs.querypie.reasoner.support.Pattern;
import nl.vu.cs.querypie.reasoner.support.Term;

public class GenericRuleExecutor extends Action {

    public static final int RULE_ID = 0;

    private Rule rule;
    private int[][] pos_gen_head;
    private final SimpleData[] outputTriple = new SimpleData[3];

    @Override
    public void registerActionParameters(ActionConf conf) {
        conf.registerParameter(RULE_ID, "rule", null, true);
    }

    @Override
    public void startProcess(ActionContext context) throws Exception {
        rule = ReasoningContext.getInstance().getRule(getParamInt(RULE_ID));
        pos_gen_head = rule.getSharedVariablesGen_Head();
        Pattern head = rule.getHead();
        for (int i = 0; i < 2; i++) {
            Term t = head.getTerm(i);
            if (t.getName() == null) {
                outputTriple[i] = new TLong(t.getValue());
            }
        }
    }

    @Override
    public void process(Tuple tuple, ActionContext context, ActionOutput actionOutput) throws Exception {
        // Copy the "key" in the output triple
        for (int i = 0; i < pos_gen_head.length; ++i) {
            outputTriple[pos_gen_head[i][1]] = tuple.get(i);
        }
        actionOutput.output(outputTriple);
        System.out.println(outputTriple[0] + " " + outputTriple[1] + " " + outputTriple[2]);
    }
}
Bug fix
src/nl/vu/cs/querypie/reasoner/actions/GenericRuleExecutor.java
Bug fix
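For context on the fix above: startProcess pre-fills outputTriple with the constant terms of the rule head, and process then overwrites only the positions listed in pos_gen_head. With the old bound of 2 the third head position was never initialized, so a constant object term stayed null. The snippet below is a hypothetical, standalone illustration of that off-by-one; plain strings stand in for the project's Term/TLong machinery and the triple values are invented.

// Hypothetical mock, not QueryPie code: non-null entries are constant head terms,
// null marks positions that the real executor later fills from the matched tuple.
public class HeadBoundDemo {
    public static void main(String[] args) {
        String[] headConstants = { null, "rdf:type", "ex:Person" }; // subject is a variable, p and o are constants
        String[] outputTriple = new String[3];

        // Old loop: i < 2 never visits index 2, so the constant object is lost.
        for (int i = 0; i < 2; i++) {
            if (headConstants[i] != null) outputTriple[i] = headConstants[i];
        }
        System.out.println(java.util.Arrays.toString(outputTriple)); // [null, rdf:type, null]

        // Fixed loop: i < 3 copies every constant position of the head.
        for (int i = 0; i < 3; i++) {
            if (headConstants[i] != null) outputTriple[i] = headConstants[i];
        }
        System.out.println(java.util.Arrays.toString(outputTriple)); // [null, rdf:type, ex:Person]
    }
}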
Java
apache-2.0
9efd746f5874462453a9623f6439c4b8963166de
0
ProgrammingLife2016/PL4-2016
package core; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.util.Arrays; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; /** * Class responsible for the collapsing of nodes in the graph. * By collapsing nodes the size of the graph can be reduced. * Created by Niels Warnars on 2-5-2016. */ public final class GraphReducer { private GraphReducer() { } private static List<HashMap<Integer, Node>> levelMaps = new ArrayList<HashMap<Integer, Node>>(); /** * Give all nodes a list of its parents. * * @param nodeMap A HashMap containing all nodes in the graph. */ public static void determineParents(HashMap<Integer, Node> nodeMap) { for (int idx = 1; idx <= nodeMap.size(); idx++) { Node parent = nodeMap.get(idx); if (parent == null) { continue; } for (int childId : parent.getLinks()) { Node child = nodeMap.get(childId); if (child == null) { continue; } if (!child.getParents().contains(parent.getId())) { child.addParent(parent.getId()); } } } } /** * Perform collapsing on multiple level of nodes. * * @param startMap An uncollapsed node map. * @return A list of node maps with a decreasing amount of nodes. */ public static List<HashMap<Integer, Node>> createLevelMaps(HashMap<Integer, Node> startMap) { levelMaps.add(startMap); for (int i = 1;; i++) { HashMap<Integer, Node> levelMap = collapse(levelMaps.get(i - 1)); levelMaps.add(levelMap); int previousMapSize = levelMaps.get(i - 1).size(); int currentMapSize = levelMaps.get(i).size(); // Don't make any new zoom level if the number of nodes after reduction is only 2 less // than the number of nodes after previous reduction. if ((previousMapSize - currentMapSize) <= 2) { return levelMaps; } } } /** * Copy all values of a given node map. * * @param map A node map to be copied. * @return A copied node map. */ @SuppressFBWarnings("WMI_WRONG_MAP_ITERATOR") private static HashMap<Integer, Node> copyNodeMap(HashMap<Integer, Node> map) { HashMap<Integer, Node> res = new HashMap<Integer, Node>(); for (int i : map.keySet()) { Node n = map.get(i); Node newNode = new Node(n.getId(), n.getSequence(), n.getzIndex()); newNode.setLinks(n.getLinks()); newNode.setParents(n.getParents()); newNode.setGenomes(n.getGenomes()); res.put(i, newNode); } return res; } /** * Reduce the number of nodes in a graph by collapsing vertically and horizontally. * * @param map A HashMap containing all nodes in the graph. * @return A collapsed map. */ public static HashMap<Integer, Node> collapse(HashMap<Integer, Node> map) { HashMap<Integer, Node> nodeMap = copyNodeMap(map); determineParents(nodeMap); for (int idx = 1; idx <= map.size(); idx++) { Node parent = nodeMap.get(idx); if (parent == null) { continue; } collapseSymmetricalNodeBubble(nodeMap, parent); collapseAsymmetricalNodeBubble(nodeMap, parent); collapseNodeSequence(nodeMap, parent); } return nodeMap; } /** * Collapse a parent and its grandchild horizontally. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with its grandchild. * @return Whether the horizontal collapse action has succeeded. 
*/ public static Boolean collapseNodeSequence(HashMap<Integer, Node> nodeMap, Node parent) { Boolean res = false; // Links must be present from parent --> child if (parent == null) { return false; } List<Integer> childrenIds = parent.getLinks(nodeMap); if (childrenIds.size() != 1) { return false; } for (int idx = 0; idx < childrenIds.size(); idx++) { Node child = nodeMap.get(childrenIds.get(idx)); // A child may only have one parent and grandchild if (child.getLinks(nodeMap).size() != 1) { return false; } if (child.getParents(nodeMap).size() != 1) { return false; } Node grandChild = nodeMap.get(child.getLinks(nodeMap).get(0)); addGenomes(parent, child); addGenomes(grandChild, child); parent.setLinks(new ArrayList<>(Arrays.asList(grandChild.getId()))); grandChild.setParents(new ArrayList<>(Arrays.asList(parent.getId()))); nodeMap.remove(child.getId()); res = true; } return res; } /** * Collapse a child1, if child2 is a grandchild of child1. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with a number of its children. * @return Whether nodes have been collapsed. */ public static Boolean collapseAsymmetricalNodeBubble(HashMap<Integer, Node> nodeMap, Node parent) { List<Integer> children = parent.getLinks(nodeMap); if (children.size() == 2) { int child1Id = children.get(0); int child2Id = children.get(1); List<Integer> child1ChildrenIds = nodeMap.get(child1Id).getLinks(nodeMap); List<Integer> child2ChildrenIds = nodeMap.get(child2Id).getLinks(nodeMap); Node child1 = nodeMap.get(child1Id); Node child2 = nodeMap.get(child2Id); if (child1ChildrenIds.contains(child2Id)) { addGenomes(child2, child1); nodeMap.remove(child1Id); return true; } else if (child2ChildrenIds.contains(child1Id)) { addGenomes(child1, child2); nodeMap.remove(child2Id); return true; } } return false; } /** * Collapse a parent and its grandchild horizontally. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with a number of its children. * @return Whether nodes have been collapsed. */ public static Boolean collapseSymmetricalNodeBubble(HashMap<Integer, Node> nodeMap, Node parent) { List<Integer> children = parent.getLinks(nodeMap); if (children.size() <= 1) { return false; } // Check whether all children only have one child for (int i = 0; i < children.size(); i++) { if (nodeMap.get(children.get(i)).getLinks(nodeMap).size() != 1) { return false; } } // Check whether all grand children are the same for (int i = 0; i < children.size() - 1; i++) { Integer grandChild1 = nodeMap.get(children.get(i)).getLinks(nodeMap).get(0); Integer grandChild2 = nodeMap.get(children.get(i + 1)).getLinks(nodeMap).get(0); if (!grandChild1.equals(grandChild2)) { return false; } } // If one of the children in the bubble has a piece of DNA sequence containing more // than one nucleotide then abort. for (int i = 0; i < children.size(); i++) { if (nodeMap.get(children.get(i)).getSequence().length() > 1) { return false; } } Node child0 = nodeMap.get(children.get(0)); // Remove redundant nodes in bubble for (int i = 1; i < children.size(); i++) { int childId = children.get(i); Node child = nodeMap.get(childId); if (child != null) { addGenomes(child0, child); nodeMap.remove(childId); } } return true; } /** * Adds a list of genomes to another list. * * @param base Base node. * @param toAdd Node of which its genome has to be added to the base node. 
*/ public static void addGenomes(Node base, Node toAdd) { Set<String> hs = new LinkedHashSet<String>(base.getGenomes()); hs.addAll(toAdd.getGenomes()); base.setGenomes(new ArrayList<String>(hs)); } }
src/main/java/core/GraphReducer.java
package core; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import java.util.Arrays; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; /** * Class responsible for the collapsing of nodes in the graph. * By collapsing nodes the size of the graph can be reduced. * Created by Niels Warnars on 2-5-2016. */ public final class GraphReducer { private GraphReducer() { } private static List<HashMap<Integer, Node>> levelMaps = new ArrayList<HashMap<Integer, Node>>(); /** * Give all nodes a list of its parents. * * @param nodeMap A HashMap containing all nodes in the graph. */ public static void determineParents(HashMap<Integer, Node> nodeMap) { for (int idx = 1; idx <= nodeMap.size(); idx++) { Node parent = nodeMap.get(idx); if (parent == null) { continue; } for (int childId : parent.getLinks()) { Node child = nodeMap.get(childId); if (child == null) { continue; } if (!child.getParents().contains(parent.getId())) { child.addParent(parent.getId()); } } } } /** * Perform collapsing on multiple level of nodes. * * @param startMap An uncollapsed node map. * @return A list of node maps with a decreasing amount of nodes. */ public static List<HashMap<Integer, Node>> createLevelMaps(HashMap<Integer, Node> startMap) { levelMaps.add(startMap); for (int i = 1;; i++) { HashMap<Integer, Node> levelMap = collapse(levelMaps.get(i - 1)); levelMaps.add(levelMap); int previousMapSize = levelMaps.get(i - 1).size(); int currentMapSize = levelMaps.get(i).size(); // Don't make any new zoom level if the number of nodes after reduction is only 2 less // than the number of nodes after previous reduction. if ((previousMapSize - currentMapSize) <= 2) { return levelMaps; } } } /** * Copy all values of a given node map. * * @param map A node map to be copied. * @return A copied node map. */ @SuppressFBWarnings("WMI_WRONG_MAP_ITERATOR") private static HashMap<Integer, Node> copyNodeMap(HashMap<Integer, Node> map) { HashMap<Integer, Node> res = new HashMap<Integer, Node>(); for (int i : map.keySet()) { Node n = map.get(i); Node newNode = new Node(n.getId(), n.getSequence(), n.getzIndex()); newNode.setLinks(n.getLinks()); newNode.setParents(n.getParents()); newNode.setGenomes(n.getGenomes()); res.put(i, newNode); } return res; } /** * Reduce the number of nodes in a graph by collapsing vertically and horizontally. * * @param map A HashMap containing all nodes in the graph. * @return A collapsed map. */ public static HashMap<Integer, Node> collapse(HashMap<Integer, Node> map) { HashMap<Integer, Node> nodeMap = copyNodeMap(map); determineParents(nodeMap); for (int idx = 1; idx <= map.size(); idx++) { Node parent = nodeMap.get(idx); if (parent == null) { continue; } collapseSymmetricalNodeBubble(nodeMap, parent); collapseAsymmetricalNodeBubble(nodeMap, parent); collapseNodeSequence(nodeMap, parent); } return nodeMap; } /** * Collapse a parent and its grandchild horizontally. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with its grandchild. * @return Whether the horizontal collapse action has succeeded. 
*/ public static Boolean collapseNodeSequence(HashMap<Integer, Node> nodeMap, Node parent) { Boolean res = false; // Links must be present from parent --> child if (parent == null) { return false; } List<Integer> childrenIds = parent.getLinks(nodeMap); if (childrenIds.size() != 1) { return false; } for (int idx = 0; idx < childrenIds.size(); idx++) { Node child = nodeMap.get(childrenIds.get(idx)); // A child may only have one parent and grandchild if (child.getLinks(nodeMap).size() != 1) { return false; } if (child.getParents(nodeMap).size() != 1) { return false; } Node grandChild = nodeMap.get(child.getLinks(nodeMap).get(0)); addGenomes(parent, child); addGenomes(grandChild, child); parent.setLinks(new ArrayList<>(Arrays.asList(grandChild.getId()))); grandChild.setParents(new ArrayList<>(Arrays.asList(parent.getId()))); nodeMap.remove(child.getId()); res = true; } return res; } /** * Collapse a child1, if child2 is a grandchild of child1. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with a number of its children. * @return Whether nodes have been collapsed. */ public static Boolean collapseAsymmetricalNodeBubble(HashMap<Integer, Node> nodeMap, Node parent) { List<Integer> children = parent.getLinks(nodeMap); if (children.size() == 2) { int child1Id = children.get(0); int child2Id = children.get(1); List<Integer> child1ChildrenIds = nodeMap.get(child1Id).getLinks(nodeMap); List<Integer> child2ChildrenIds = nodeMap.get(child2Id).getLinks(nodeMap); Node child1 = nodeMap.get(child1Id); Node child2 = nodeMap.get(child2Id); if (child1ChildrenIds.contains(child2Id)) { addGenomes(child2, child1); nodeMap.remove(child1Id); return true; } else if (child2ChildrenIds.contains(child1Id)) { addGenomes(child1, child2); nodeMap.remove(child2Id); return true; } } return false; } /** * Collapse a parent and its grandchild horizontally. * * @param nodeMap A HashMap containing all nodes in the graph. * @param parent A given parent node to be collapsed with a number of its children. * @return Whether nodes have been collapsed. */ public static Boolean collapseSymmetricalNodeBubble(HashMap<Integer, Node> nodeMap, Node parent) { List<Integer> children = parent.getLinks(nodeMap); if (children.size() <= 1) { return false; } // Check whether all children only have one child for (int i = 0; i < children.size(); i++) { if (nodeMap.get(children.get(i)).getLinks(nodeMap).size() != 1) { return false; } } // Check whether all grand children are the same for (int i = 0; i < children.size() - 1; i++) { Integer grandChild1 = nodeMap.get(children.get(i)).getLinks(nodeMap).get(0); Integer grandChild2 = nodeMap.get(children.get(i + 1)).getLinks(nodeMap).get(0); if (!grandChild1.equals(grandChild2)) { return false; } } // If one of the children in the bubble has a genome contains more than one nucleotide // then abort. for (int i = 0; i < children.size(); i++) { if (nodeMap.get(children.get(i)).getSequence().length() > 1) { return false; } } Node child0 = nodeMap.get(children.get(0)); // Remove redundant nodes in bubble for (int i = 1; i < children.size(); i++) { int childId = children.get(i); Node child = nodeMap.get(childId); if (child != null) { addGenomes(child0, child); nodeMap.remove(childId); } } return true; } /** * Adds a list of genomes to another list. * * @param base Base node. * @param toAdd Node of which its genome has to be added to the base node. 
*/ public static void addGenomes(Node base, Node toAdd) { Set<String> hs = new LinkedHashSet<String>(base.getGenomes()); hs.addAll(toAdd.getGenomes()); base.setGenomes(new ArrayList<String>(hs)); } }
Fix comment
src/main/java/core/GraphReducer.java
Fix comment
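A minimal usage sketch of the GraphReducer sources above. It assumes core.Node exposes the constructor and setters that copyNodeMap already relies on (a Node(int, String, int) constructor with a String sequence and an int z-index, plus setLinks and setGenomes); the tiny diamond graph, the genome labels, and the sketch class itself are invented for illustration, not part of the project.

import core.GraphReducer;
import core.Node;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class GraphReducerSketch {
    public static void main(String[] args) {
        // A diamond: 1 -> {2, 3} -> 4, where 2 and 3 are single-nucleotide siblings.
        HashMap<Integer, Node> nodeMap = new HashMap<>();
        nodeMap.put(1, node(1, "AC", 1, Arrays.asList(2, 3)));
        nodeMap.put(2, node(2, "G", 2, Arrays.asList(4)));
        nodeMap.put(3, node(3, "T", 2, Arrays.asList(4)));
        nodeMap.put(4, node(4, "CA", 3, new ArrayList<Integer>()));

        // Build coarser zoom levels; createLevelMaps stops once an extra level
        // saves at most two nodes compared to the previous one.
        List<HashMap<Integer, Node>> levels = GraphReducer.createLevelMaps(nodeMap);
        for (int i = 0; i < levels.size(); i++) {
            System.out.println("level " + i + ": " + levels.get(i).size() + " nodes");
        }
    }

    private static Node node(int id, String sequence, int zIndex, List<Integer> links) {
        Node n = new Node(id, sequence, zIndex);
        n.setLinks(new ArrayList<Integer>(links));
        n.setGenomes(new ArrayList<String>(Arrays.asList("genome" + id))); // placeholder genome labels
        return n;
    }
}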