conflict_resolution |
---|
<<<<<<<
import com.canoo.dolphin.util.DolphinRemotingException;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
=======
import com.canoo.dolphin.util.Assert;
>>>>>>>
import com.canoo.dolphin.util.Assert;
import com.canoo.dolphin.util.DolphinRemotingException;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
<<<<<<<
=======
private static ClientDolphin createClientDolphin(final ClientConfiguration clientConfiguration) {
Assert.requireNonNull(clientConfiguration, "clientConfiguration");
final ClientDolphin clientDolphin = new ClientDolphin();
clientDolphin.setClientModelStore(new ClientModelStore(clientDolphin));
final HttpClientConnector clientConnector = new HttpClientConnector(clientDolphin, new BlindCommandBatcher(), clientConfiguration.getServerEndpoint());
clientConnector.setCodec(new OptimizedJsonCodec());
clientConnector.setUiThreadHandler(clientConfiguration.getUiThreadHandler());
clientDolphin.setClientConnector(clientConnector);
return clientDolphin;
}
>>>>>>> |
<<<<<<<
import java.util.Collections;
import java.util.HashMap;
=======
>>>>>>>
import java.util.Collections;
<<<<<<<
import java.util.Set;
=======
import java.util.concurrent.ConcurrentHashMap;
>>>>>>>
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap; |
<<<<<<<
public static String escapeP4String(String string) {
if(string == null) return null;
String result = new String(string);
result = result.replace("%","%25");
result = result.replace("@","%40");
result = result.replace("#","%23");
result = result.replace("*","%2A");
return result;
}
public static String unescapeP4String(String string) {
if(string == null) return null;
String result = new String(string);
result = result.replace("%40","@");
result = result.replace("%23","#");
result = result.replace("%2A","*");
result = result.replace("%25","%");
return result;
}
=======
/**
* Append Perforce workspace name with a Jenkins workspace identifier, if this
* is a concurrent build job.
*
* @param workspace Workspace of the current build
* @param p4Client User defined client name
* @return The new client name. If this is a concurrent build, append the
* client name with a Jenkins workspace identifier.
*/
private String getConcurrentClientName(FilePath workspace, String p4Client) {
if (workspace != null) {
//Match @ followed by an integer at the end of the workspace path
Pattern p = Pattern.compile(".*@(\\d+)$");
Matcher matcher = p.matcher(workspace.getRemote());
if (matcher.find()) {
p4Client += "_" + matcher.group(1);
}
}
return p4Client;
}
>>>>>>>
public static String escapeP4String(String string) {
if(string == null) return null;
String result = new String(string);
result = result.replace("%","%25");
result = result.replace("@","%40");
result = result.replace("#","%23");
result = result.replace("*","%2A");
return result;
}
public static String unescapeP4String(String string) {
if(string == null) return null;
String result = new String(string);
result = result.replace("%40","@");
result = result.replace("%23","#");
result = result.replace("%2A","*");
result = result.replace("%25","%");
return result;
}
/**
* Append Perforce workspace name with a Jenkins workspace identifier, if this
* is a concurrent build job.
*
* @param workspace Workspace of the current build
* @param p4Client User defined client name
* @return The new client name. If this is a concurrent build, append the
* client name with a Jenkins workspace identifier.
*/
private String getConcurrentClientName(FilePath workspace, String p4Client) {
if (workspace != null) {
//Match @ followed by an integer at the end of the workspace path
Pattern p = Pattern.compile(".*@(\\d+)$");
Matcher matcher = p.matcher(workspace.getRemote());
if (matcher.find()) {
p4Client += "_" + matcher.group(1);
}
}
return p4Client;
} |
<<<<<<<
public boolean isDisableSyncOnly() {
return disableSyncOnly;
}
public void setDisableSyncOnly(boolean disableSyncOnly) {
this.disableSyncOnly = disableSyncOnly;
}
=======
public String getExcludedUsers() {
return excludedUsers;
}
public void setExcludedUsers(String users) {
excludedUsers = users;
}
public String getExcludedFiles() {
return excludedFiles;
}
public void setExcludedFiles(String files) {
excludedFiles = files;
}
>>>>>>>
public boolean isDisableSyncOnly() {
return disableSyncOnly;
}
public void setDisableSyncOnly(boolean disableSyncOnly) {
this.disableSyncOnly = disableSyncOnly;
}
public String getExcludedUsers() {
return excludedUsers;
}
public void setExcludedUsers(String users) {
excludedUsers = users;
}
public String getExcludedFiles() {
return excludedFiles;
}
public void setExcludedFiles(String files) {
excludedFiles = files;
} |
<<<<<<<
@Override
public boolean exists(final ITileSource pTileSource, final MapTile pTile) {
return new File(OpenStreetMapTileProviderConstants.TILE_PATH_BASE, pTileSource.getTileRelativeFilenameString(pTile)
+ OpenStreetMapTileProviderConstants.TILE_PATH_EXTENSION).exists();
}
=======
@Override
public void onDetach() {
if (initThread!=null){
try {
initThread.interrupt();
}catch (Throwable t){}
}
}
>>>>>>>
@Override
public void onDetach() {
if (initThread!=null){
try {
initThread.interrupt();
}catch (Throwable t){}
}
}
@Override
public boolean exists(final ITileSource pTileSource, final MapTile pTile) {
return new File(OpenStreetMapTileProviderConstants.TILE_PATH_BASE, pTileSource.getTileRelativeFilenameString(pTile)
+ OpenStreetMapTileProviderConstants.TILE_PATH_EXTENSION).exists();
} |
<<<<<<<
private static final BoundingBox sCentralParkBoundingBox;
private static final Paint sPaint;
=======
private BoundingBoxE6 sCentralParkBoundingBox;
private Paint sPaint;
>>>>>>>
private BoundingBox sCentralParkBoundingBox;
private Paint sPaint;
<<<<<<<
static {
sCentralParkBoundingBox = new BoundingBox(40.796788,
=======
public SampleLimitedScrollArea()
{
sCentralParkBoundingBox = new BoundingBoxE6(40.796788,
>>>>>>>
public SampleLimitedScrollArea()
{
sCentralParkBoundingBox = new BoundingBox(40.796788, |
<<<<<<<
public Drawable getDrawable(final String aFilePath) {
//Log.d(IMapView.LOGTAG, aFilePath + " attempting to load bitmap");
=======
public Drawable getDrawable(final String aFilePath) throws LowMemoryException {
>>>>>>>
public Drawable getDrawable(final String aFilePath) throws LowMemoryException {
//Log.d(IMapView.LOGTAG, aFilePath + " attempting to load bitmap"); |
<<<<<<<
// @Override
public void zoomToSpan(final double pLatSpan, final double pLonSpan) {
mController.zoomToSpan((int)(pLatSpan*1E6), (int)(pLonSpan*1E6));
=======
@Override
public boolean zoomTo(int zoomLevel) {
return setZoom(zoomLevel) > 0;
}
@Override
public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel) {
return setZoom(zoomLevel) > 0;
}
@Override
public void zoomToSpan(final int pLatSpanE6, final int pLonSpanE6) {
mController.zoomToSpan(pLatSpanE6, pLonSpanE6);
>>>>>>>
@Override
public boolean zoomTo(int zoomLevel) {
return setZoom(zoomLevel) > 0;
}
@Override
public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel) {
return setZoom(zoomLevel) > 0;
}
@Override
public void zoomToSpan(final int pLatSpanE6, final int pLonSpanE6) {
mController.zoomToSpan(pLatSpanE6, pLonSpanE6);
}
// @Override
public void zoomToSpan(final double pLatSpan, final double pLonSpan) {
mController.zoomToSpan((int)(pLatSpan*1E6), (int)(pLonSpan*1E6));
<<<<<<<
// @Override
public boolean isInvertedTiles() {
return false;
}
// @Override
public void setInvertedTiles(boolean value) {
}
@Override
public void zoomToSpan(int latSpanE6, int lonSpanE6) {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
=======
>>>>>>> |
<<<<<<<
/**
* returns true if the map tiles are currently being color inverted
* @return
*/
boolean isInvertedTiles();
void setInvertedTiles(boolean value);
void zoomToSpan(double latSpan, double lonSpan);
=======
>>>>>>>
void zoomToSpan(double latSpan, double lonSpan); |
<<<<<<<
false, true, false, false, "${basename}", 0, browser);
=======
true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
>>>>>>>
false, true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
<<<<<<<
false, true, false, false, "${basename}", 0, browser);
=======
true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
>>>>>>>
false, true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
<<<<<<<
false, true, false, false, "${basename}", 0, browser);
=======
true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
>>>>>>>
false, true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
<<<<<<<
false, true, false, false, "${basename}", 0, browser);
=======
true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file");
>>>>>>>
false, true, false, false, "${basename}", 0, browser, "exclude_user", "exclude_file"); |
<<<<<<<
mSamples.add(DrawPolylineWithArrows.class);
=======
mSamples.add(DrawPolygonWithoutVerticalWrapping.class);
mSamples.add(SampleDrawPolylineWithoutVerticalWrapping.class);
>>>>>>>
mSamples.add(DrawPolygonWithoutVerticalWrapping.class);
mSamples.add(SampleDrawPolylineWithoutVerticalWrapping.class);
mSamples.add(DrawPolylineWithArrows.class); |
<<<<<<<
import org.osmdroid.util.BoundingBox;
=======
import org.osmdroid.events.ScrollEvent;
import org.osmdroid.events.ZoomEvent;
import org.osmdroid.util.BoundingBoxE6;
>>>>>>>
import org.osmdroid.util.BoundingBox;
import org.osmdroid.events.ScrollEvent;
import org.osmdroid.events.ZoomEvent;
import org.osmdroid.util.BoundingBoxE6; |
<<<<<<<
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertEquals;
=======
>>>>>>>
import static org.junit.Assert.assertEquals; |
<<<<<<<
final String executionId = dockerRunnerStart(state);
final Event submitted = Event.submitted(state.workflowInstance(), executionId);
try {
stateManager.receive(submitted);
} catch (StateManager.IsClosed isClosed) {
LOG.warn("Could not send 'created' event", isClosed);
}
=======
runSpec = createRunSpec(state);
>>>>>>>
runSpec = createRunSpec(state);
<<<<<<<
final DockerRunner.RunSpec runSpec = DockerRunner.RunSpec.create(
executionId,
=======
return RunSpec.create(
>>>>>>>
return RunSpec.create(
executionId,
<<<<<<<
LOG.info("running:{} image:{} args:{} termination_logging:{}", workflowInstance.toKey(),
runSpec.imageName(), runSpec.args(), runSpec.terminationLogging());
dockerRunner.start(workflowInstance, runSpec);
return executionId;
=======
>>>>>>> |
<<<<<<<
log.warn("Failed to remove dangling NEW state for: {}", workflowInstance);
=======
LOG.warn("Failed to remove dangling NEW state for: {}", workflowInstance, e);
>>>>>>>
log.warn("Failed to remove dangling NEW state for: {}", workflowInstance, e); |
<<<<<<<
var queuedStateManager = closer.register(new PersistentStateManager(time, stateProcessingExecutor,
storage, eventConsumer, eventConsumerExecutor, fanOutput(outputHandlers),
shardedCounter));
=======
var outputHandler = OutputHandler.mdcDecorating(fanOutput(outputHandlers));
var queuedStateManager = closer.register(new QueuedStateManager(time, eventProcessingExecutor,
storage, eventConsumer, eventConsumerExecutor, outputHandler, shardedCounter));
>>>>>>>
var outputHandler = OutputHandler.mdcDecorating(fanOutput(outputHandlers));
var queuedStateManager = closer.register(new PersistentStateManager(time, stateProcessingExecutor,
storage, eventConsumer, eventConsumerExecutor, outputHandler, shardedCounter)); |
<<<<<<<
public static final Duration DEFAULT_RETRY_BASE_DELAY_BT = Duration.ofSeconds(1);
=======
static final Duration DEFAULT_RETRY_BASE_DELAY_BT = Duration.ofSeconds(1);
static final String AUTHORIZATION_SERVICE_ACCOUNT_USER_ROLE_CONFIG = "styx.authorization.service-account-user-role";
static final String AUTHORIZATION_GSUITE_USER_CONFIG = "styx.authorization.gsuite-user";
static final String AUTHORIZATION_REQUIRE_ALL_CONFIG = "styx.authorization.require.all";
static final String AUTHORIZATION_REQUIRE_WORKFLOWS = "styx.authorization.require.workflows";
static final String AUTHORIZATION_MESSAGE_CONFIG = "styx.authorization.message";
static final String STYX_RUNNING_STATE_TTL_CONFIG = "styx.stale-state-ttls.running";
static final String DEFAULT_STYX_RUNNING_STATE_TTL = "PT24H";
>>>>>>>
static final Duration DEFAULT_RETRY_BASE_DELAY_BT = Duration.ofSeconds(1);
static final String STYX_RUNNING_STATE_TTL_CONFIG = "styx.stale-state-ttls.running";
static final String DEFAULT_STYX_RUNNING_STATE_TTL = "PT24H";
<<<<<<<
=======
private static String getConfigWithDefault(Config config, String key, String defaultValue) {
return config.hasPath(key) ? config.getString(key) : defaultValue;
}
@VisibleForTesting
static ServiceAccountUsageAuthorizer serviceAccountUsageAuthorizer(Config config,
GoogleCredential credential,
String serviceName) {
final AuthorizationPolicy authorizationPolicy = authorizationPolicy(config);
final ServiceAccountUsageAuthorizer authorizer;
if (config.hasPath(AUTHORIZATION_SERVICE_ACCOUNT_USER_ROLE_CONFIG)) {
final String role = config.getString(AUTHORIZATION_SERVICE_ACCOUNT_USER_ROLE_CONFIG);
final String gsuiteUserEmail = config.getString(AUTHORIZATION_GSUITE_USER_CONFIG);
final String message = getConfigWithDefault(config, AUTHORIZATION_MESSAGE_CONFIG, "");
authorizer = ServiceAccountUsageAuthorizer.create(
role, authorizationPolicy, credential, gsuiteUserEmail, serviceName, message);
} else {
authorizer = ServiceAccountUsageAuthorizer.NOP;
}
return authorizer;
}
@VisibleForTesting
static AuthorizationPolicy authorizationPolicy(Config config) {
final AuthorizationPolicy authorizationPolicy;
if (config.hasPath(AUTHORIZATION_REQUIRE_ALL_CONFIG) &&
config.getBoolean(AUTHORIZATION_REQUIRE_ALL_CONFIG)) {
authorizationPolicy = new ServiceAccountUsageAuthorizer.AllAuthorizationPolicy();
} else if (config.hasPath(AUTHORIZATION_REQUIRE_WORKFLOWS)) {
final List<WorkflowId> ids = config.getStringList(AUTHORIZATION_REQUIRE_WORKFLOWS).stream()
.map(WorkflowId::parseKey)
.collect(Collectors.toList());
authorizationPolicy = new ServiceAccountUsageAuthorizer.WhitelistAuthorizationPolicy(ids);
} else {
authorizationPolicy = new ServiceAccountUsageAuthorizer.NoAuthorizationPolicy();
}
return authorizationPolicy;
}
>>>>>>>
private static String getConfigWithDefault(Config config, String key, String defaultValue) {
return config.hasPath(key) ? config.getString(key) : defaultValue;
} |
<<<<<<<
public interface EventConsumer extends BiConsumer<SequenceEvent, RunState> { }
public interface EventConsumerFactory extends BiFunction<Environment, Stats, EventConsumer> { }
public interface WorkflowExecutionGateFactory extends BiFunction<Environment, Storage, WorkflowExecutionGate> { }
=======
public interface EventConsumerFactory extends BiFunction<Environment, Stats, BiConsumer<SequenceEvent, RunState>> { }
>>>>>>>
public interface EventConsumer extends BiConsumer<SequenceEvent, RunState> { }
public interface EventConsumerFactory extends BiFunction<Environment, Stats, EventConsumer> { } |
<<<<<<<
if (segmentReaders!=null && segmentReaders.size() > 0) {
final AtomicInteger skipDocs = new AtomicInteger(0);
final SenseiIndexPruner pruner = _core.getIndexPruner();
List<BoboIndexReader> validatedSegmentReaders = timerMetric.time(new Callable<List<BoboIndexReader>>(){
@Override
public List<BoboIndexReader> call() throws Exception {
IndexReaderSelector readerSelector = pruner.getReaderSelector(request);
List<BoboIndexReader> validatedReaders = new ArrayList<BoboIndexReader>(segmentReaders.size());
for (BoboIndexReader segmentReader : segmentReaders){
if (readerSelector.isSelected(segmentReader)){
validatedReaders.add(segmentReader);
}
else{
skipDocs.addAndGet(segmentReader.numDocs());
}
}
return validatedReaders;
}
});
pruner.sort(validatedSegmentReaders);
browser = new MultiBoboBrowser(BoboBrowser.createBrowsables(validatedSegmentReaders));
BrowseRequest breq = RequestConverter.convert(request, queryBuilderFactory);
if (request.getMapReduceFunction() != null) {
SenseiMapFunctionWrapper mapWrapper = new SenseiMapFunctionWrapper(request.getMapReduceFunction(), _core.getSystemInfo().getFacetInfos());
breq.setMapReduceWrapper(mapWrapper);
}
SubReaderAccessor<BoboIndexReader> subReaderAccessor =
ZoieIndexReader.getSubReaderAccessor(validatedSegmentReaders);
SenseiResult res = browse(request, browser, breq, subReaderAccessor);
int totalDocs = res.getTotalDocs()+skipDocs.get();
res.setTotalDocs(totalDocs);
// For debugging serialization issues:
// byte[] responseBytes = getSerializer().responseToBytes(res);
// SenseiResult response2 = getSerializer().responseFromBytes(responseBytes);
// if(!res.equals(response2)) {
// throw new IllegalArgumentException("Cant serialize response");
// }
return res;
=======
if (segmentReaders!=null && segmentReaders.size()>0){
final AtomicInteger skipDocs = new AtomicInteger(0);
List<BoboIndexReader> validatedSegmentReaders = _timerMetric.time(new Callable<List<BoboIndexReader>>(){
@Override
public List<BoboIndexReader> call() throws Exception {
SenseiIndexPruner pruner = _core.getIndexPruner();
IndexReaderSelector readerSelector = pruner.getReaderSelector(request);
List<BoboIndexReader> validatedReaders = new ArrayList<BoboIndexReader>(segmentReaders.size());
for (BoboIndexReader segmentReader : segmentReaders){
if (readerSelector.isSelected(segmentReader)){
validatedReaders.add(segmentReader);
}
else{
skipDocs.addAndGet(segmentReader.numDocs());
}
}
return validatedReaders;
}
});
browser = new MultiBoboBrowser(BoboBrowser.createBrowsables(validatedSegmentReaders));
BrowseRequest breq = RequestConverter.convert(request, queryBuilderFactory);
if (request.getMapReduceFunction() != null) {
SenseiMapFunctionWrapper mapWrapper = new SenseiMapFunctionWrapper(request.getMapReduceFunction(), _core.getSystemInfo().getFacetInfos());
breq.setMapReduceWrapper(mapWrapper);
}
SubReaderAccessor<BoboIndexReader> subReaderAccessor =
ZoieIndexReader.getSubReaderAccessor(validatedSegmentReaders);
SenseiResult res = browse(browser, breq, subReaderAccessor);
int totalDocs = res.getTotalDocs()+skipDocs.get();
res.setTotalDocs(totalDocs);
return res;
>>>>>>>
if (segmentReaders!=null && segmentReaders.size() > 0) {
final AtomicInteger skipDocs = new AtomicInteger(0);
final SenseiIndexPruner pruner = _core.getIndexPruner();
List<BoboIndexReader> validatedSegmentReaders = _timerMetric.time(new Callable<List<BoboIndexReader>>(){
@Override
public List<BoboIndexReader> call() throws Exception {
IndexReaderSelector readerSelector = pruner.getReaderSelector(request);
List<BoboIndexReader> validatedReaders = new ArrayList<BoboIndexReader>(segmentReaders.size());
for (BoboIndexReader segmentReader : segmentReaders){
if (readerSelector.isSelected(segmentReader)){
validatedReaders.add(segmentReader);
}
else{
skipDocs.addAndGet(segmentReader.numDocs());
}
}
return validatedReaders;
}
});
pruner.sort(validatedSegmentReaders);
browser = new MultiBoboBrowser(BoboBrowser.createBrowsables(validatedSegmentReaders));
BrowseRequest breq = RequestConverter.convert(request, queryBuilderFactory);
if (request.getMapReduceFunction() != null) {
SenseiMapFunctionWrapper mapWrapper = new SenseiMapFunctionWrapper(request.getMapReduceFunction(), _core.getSystemInfo().getFacetInfos());
breq.setMapReduceWrapper(mapWrapper);
}
SubReaderAccessor<BoboIndexReader> subReaderAccessor =
ZoieIndexReader.getSubReaderAccessor(validatedSegmentReaders);
SenseiResult res = browse(request, browser, breq, subReaderAccessor);
int totalDocs = res.getTotalDocs()+skipDocs.get();
res.setTotalDocs(totalDocs);
// For debugging serialization issues:
// byte[] responseBytes = getSerializer().responseToBytes(res);
// SenseiResult response2 = getSerializer().responseFromBytes(responseBytes);
// if(!res.equals(response2)) {
// throw new IllegalArgumentException("Cant serialize response");
// }
return res; |
<<<<<<<
=======
protected SenseiPluginRegistry pluginRegistry;
public static Comparator<String> DEFAULT_VERSION_COMPARATOR = ZoieConfig.DEFAULT_VERSION_COMPARATOR;
>>>>>>>
protected SenseiPluginRegistry pluginRegistry;
public static Comparator<String> DEFAULT_VERSION_COMPARATOR = ZoieConfig.DEFAULT_VERSION_COMPARATOR; |
<<<<<<<
=======
private Filter _purgeFilter = null;
private OptimizeScheduler _optimizeScheduler;
>>>>>>>
private OptimizeScheduler _optimizeScheduler;
<<<<<<<
=======
public void setPurgeFilter(Filter purgeFilter){
_purgeFilter = purgeFilter;
}
public void setOptimizeScheduler(OptimizeScheduler optimizeScheduler)
{
_optimizeScheduler = optimizeScheduler;
}
>>>>>>>
public void setOptimizeScheduler(OptimizeScheduler optimizeScheduler)
{
_optimizeScheduler = optimizeScheduler;
}
<<<<<<<
=======
if (_purgeFilter!=null){
zoie.setPurgeFilter(_purgeFilter);
}
if (_optimizeScheduler != null) {
zoie.setOptimizeScheduler(_optimizeScheduler);
}
>>>>>>>
if (_optimizeScheduler != null) {
zoie.setOptimizeScheduler(_optimizeScheduler);
} |
<<<<<<<
} else if (type.equals("weighted-multi")) {
facetHandler = buildWeightedMultiHandler(name, fieldName, termListFactoryMap.get(fieldName), dependSet);
} else if (type.equals("multi-range")) {
facetHandler = new MultiRangeFacetHandler(name, fieldName, null, termListFactoryMap.get(fieldName) , buildPredefinedRanges(paramMap));
} else if (type.equals("attribute")) {
facetHandler = new AttributesFacetHandler(name, fieldName, termListFactoryMap.get(fieldName), null , facetProps);
} else if (type.equals("histogram")) {
=======
} else if (type.equals("attribute")) {
facetHandler = new AttributesFacetHandler(name, fieldName, termListFactoryMap.get(fieldName), null , facetProps);
} else if (type.equals("histogram")) {
>>>>>>>
} else if (type.equals("weighted-multi")) {
facetHandler = buildWeightedMultiHandler(name, fieldName, termListFactoryMap.get(fieldName), dependSet);
} else if (type.equals("multi-range")) {
facetHandler = new MultiRangeFacetHandler(name, fieldName, null, termListFactoryMap.get(fieldName), buildPredefinedRanges(paramMap));
} else if (type.equals("attribute")) {
facetHandler = new AttributesFacetHandler(name, fieldName, termListFactoryMap.get(fieldName), null, facetProps);
} else if (type.equals("histogram")) { |
<<<<<<<
public void testRecordParsingWithPipeSeparator() throws Exception {
recordParser = new RecordParserImpl(3,"|",false,"");
=======
public void testRecordParsingWithPipeDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"|");
>>>>>>>
public void testRecordParsingWithPipeDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"|",false,"");
<<<<<<<
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithSpaceSeparator() throws Exception {
recordParser = new RecordParserImpl(3," ",false,"");
record = "hello cb4j world";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithTabSeparator() throws Exception {
recordParser = new RecordParserImpl(3,"\t",false,"");
record = "hello\tcb4j\tworld";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithMultipleCharacterSeparator() throws Exception {
recordParser = new RecordParserImpl(3,"###",false,"");
record = "hello###cb4j###world";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithDataQualifierCharacter() throws Exception {
recordParser = new RecordParserImpl(3,",",false,"'");
record = "'hello','cb4j','world'";
assertNull(recordParser.analyseRecord(record));
=======
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithSpaceDelimiter() throws Exception {
recordParser = new RecordParserImpl(3," ");
record = "hello cb4j world";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithTabDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"\t");
record = "hello\tcb4j\tworld";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithMultipleCharacterDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"###");
record = "hello###cb4j###world";
assertNull(recordParser.analyseRecord(record));
>>>>>>>
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithSpaceDelimiter() throws Exception {
recordParser = new RecordParserImpl(3," ",false,"");
record = "hello cb4j world";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithTabDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"\t",false,"");
record = "hello\tcb4j\tworld";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithMultipleCharacterDelimiter() throws Exception {
recordParser = new RecordParserImpl(3,"###",false,"");
record = "hello###cb4j###world";
assertNull(recordParser.analyseRecord(record));
assertEquals(3, recordParser.getRecordSize(record));
Record parsedRecord = recordParser.parseRecord(record, 1);
assertEquals("hello",parsedRecord.getFieldContentByIndex(0));
assertEquals("cb4j",parsedRecord.getFieldContentByIndex(1));
assertEquals("world",parsedRecord.getFieldContentByIndex(2));
}
@Test
public void testRecordParsingWithDataQualifierCharacter() throws Exception {
recordParser = new RecordParserImpl(3,",",false,"'");
record = "'hello','cb4j','world'";
assertNull(recordParser.analyseRecord(record)); |
<<<<<<<
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchAllDocsQuery;
=======
import java.util.HashMap;
import java.util.Map;
>>>>>>>
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchAllDocsQuery;
<<<<<<<
import org.apache.lucene.search.TermQuery;
=======
import org.json.JSONException;
>>>>>>>
import org.apache.lucene.search.TermQuery;
import org.json.JSONException; |
<<<<<<<
if (facetMBean!=null){
ObjectName facetMbeanName = new ObjectName("FacetHandler","name",facetHandler.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
}
=======
ObjectName facetMbeanName = new ObjectName(domainName,"name","FacetHandler-"+facetHandler.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
>>>>>>>
if (facetMBean!=null){
ObjectName facetMbeanName = new ObjectName(domainName,"name","FacetHandler-"+facetHandler.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
}
<<<<<<<
if (facetMBean!=null){
ObjectName facetMbeanName = new ObjectName("RuntimeFacetHandlerFactory","name",runtimeFacetHandlerFactory.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
}
=======
ObjectName facetMbeanName = new ObjectName(domainName,"name","RuntimeFacetHandlerFactory-"+runtimeFacetHandlerFactory.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
>>>>>>>
if (facetMBean!=null){
ObjectName facetMbeanName = new ObjectName(domainName,"name","RuntimeFacetHandlerFactory-"+runtimeFacetHandlerFactory.getName());
mbeanServer.registerMBean(facetMBean, facetMbeanName);
_registeredMBeans.add(facetMbeanName);
} |
<<<<<<<
private JsonTemplateProcessor jsonTemplateProcessor = new JsonTemplateProcessor();
=======
private Timer _statTimer;
>>>>>>>
private JsonTemplateProcessor jsonTemplateProcessor = new JsonTemplateProcessor();
private Timer _statTimer; |
<<<<<<<
private Timer _indexingLatencyTimer;
=======
private Meter _indexSizeMeter;
private long _lastMeasureTime;
private static final long MEASURE_INTERVAL = 1000 * 60; // 1 minute
>>>>>>>
private Meter _indexSizeMeter;
private long _lastMeasureTime;
private static final long MEASURE_INTERVAL = 1000 * 60; // 1 minute
private Timer _indexingLatencyTimer;
<<<<<<<
_indexingLatencyTimer = registerTimer("indexing-latency");
=======
_indexSizeMeter = registerMeter("index-size", "index-size");
>>>>>>>
_indexSizeMeter = registerMeter("index-size", "index-size");
_indexingLatencyTimer = registerTimer("indexing-latency"); |
<<<<<<<
=======
import com.senseidb.search.req.AbstractSenseiRequest;
import com.senseidb.search.req.AbstractSenseiResult;
import com.senseidb.search.req.SenseiSystemInfo;
import com.senseidb.servlet.AbstractSenseiClientServlet;
>>>>>>>
import com.senseidb.search.req.AbstractSenseiRequest;
import com.senseidb.search.req.AbstractSenseiResult;
import com.senseidb.search.req.SenseiSystemInfo;
import com.senseidb.servlet.AbstractSenseiClientServlet;
<<<<<<<
import com.senseidb.util.SenseiUncaughtExceptionHandler;
import com.senseidb.util.JSONUtil.FastJSONObject;
=======
import com.senseidb.util.SenseiUncaughtExceptionHandler;
>>>>>>>
import com.senseidb.util.SenseiUncaughtExceptionHandler;
import com.senseidb.util.JSONUtil.FastJSONObject;
<<<<<<<
private final JSONObject _schemaDoc;
private final SenseiSchema _senseiSchema;
private final SenseiGateway _gateway;
private PluggableSearchEngineManager pluggableSearchEngineManager;
private SenseiIndexReaderDecorator decorator;
=======
private final JSONObject _schemaDoc;
private final SenseiSchema _senseiSchema;
private final SenseiGateway _gateway;
private PluggableSearchEngineManager pluggableSearchEngineManager;
>>>>>>>
private final JSONObject _schemaDoc;
private final SenseiSchema _senseiSchema;
private final SenseiGateway _gateway;
private PluggableSearchEngineManager pluggableSearchEngineManager;
private SenseiIndexReaderDecorator decorator;
<<<<<<<
public Configuration getConfiguration() {
return _senseiConf;
}
public SenseiPluginRegistry getPluginRegistry() {
return pluginRegistry;
}
=======
public Configuration getConfiguration() {
return _senseiConf;
}
>>>>>>>
public Configuration getConfiguration() {
return _senseiConf;
}
<<<<<<<
//HashMap<String, String> initParam = new HashMap<String, String>();
//if (_senseiConfFile != null) {
//logger.info("Broker Configuration file: "+_senseiConfFile.getAbsolutePath());
//initParam.put("config.file", _senseiConfFile.getAbsolutePath());
//}
//senseiApp.setInitParams(initParam);
senseiApp.setAttribute("sensei.search.configuration", _senseiConf);
senseiApp.setAttribute(SenseiConfigServletContextListener.SENSEI_CONF_PLUGIN_REGISTRY, pluginRegistry);
senseiApp.setAttribute("sensei.search.version.comparator", _gateway != null ? _gateway.getVersionComparator() : ZoieConfig.DEFAULT_VERSION_COMPARATOR);
=======
//HashMap<String, String> initParam = new HashMap<String, String>();
//if (_senseiConfFile != null) {
//logger.info("Broker Configuration file: "+_senseiConfFile.getAbsolutePath());
//initParam.put("config.file", _senseiConfFile.getAbsolutePath());
//}
//senseiApp.setInitParams(initParam);
senseiApp.setAttribute("sensei.search.configuration", _senseiConf);
senseiApp.setAttribute("sensei.broker.export", new AbstractSenseiClientServlet.SenseiBrokerExport());
senseiApp.setAttribute(SenseiConfigServletContextListener.SENSEI_CONF_PLUGIN_REGISTRY, pluginRegistry);
senseiApp.setAttribute("sensei.search.version.comparator", _gateway.getVersionComparator());
>>>>>>>
//HashMap<String, String> initParam = new HashMap<String, String>();
//if (_senseiConfFile != null) {
//logger.info("Broker Configuration file: "+_senseiConfFile.getAbsolutePath());
//initParam.put("config.file", _senseiConfFile.getAbsolutePath());
//}
//senseiApp.setInitParams(initParam);
senseiApp.setAttribute("sensei.search.configuration", _senseiConf);
senseiApp.setAttribute("sensei.broker.export", new AbstractSenseiClientServlet.SenseiBrokerExport());
senseiApp.setAttribute(SenseiConfigServletContextListener.SENSEI_CONF_PLUGIN_REGISTRY, pluginRegistry);
senseiApp.setAttribute("sensei.search.version.comparator", _gateway != null ? _gateway.getVersionComparator() : ZoieConfig.DEFAULT_VERSION_COMPARATOR); |
<<<<<<<
private static Counter recoveredIndexInBoboFacetDataCache;
private static Counter facetMappingMismatch;
private ActivityPersistenceFactory activityPersistenceFactory;
static {
recoveredIndexInBoboFacetDataCache = Metrics.newCounter(new MetricName(CompositeActivityManager.class, "recoveredIndexInBoboFacetDataCache"));
facetMappingMismatch = Metrics.newCounter(new MetricName(CompositeActivityManager.class, "facetMappingMismatch"));
}
private BoboIndexTracker boboIndexTracker;
public CompositeActivityManager(ActivityPersistenceFactory activityPersistenceFactory) {
this.activityPersistenceFactory = activityPersistenceFactory;
}
=======
private Counter recoveredIndexInBoboFacetDataCache;
private Counter facetMappingMismatch;
private ActivityPersistenceFactory activityPersistenceFactory;
public CompositeActivityManager(ActivityPersistenceFactory activityPersistenceFactory) {
this.activityPersistenceFactory = activityPersistenceFactory;
}
>>>>>>>
private Counter recoveredIndexInBoboFacetDataCache;
private Counter facetMappingMismatch;
private ActivityPersistenceFactory activityPersistenceFactory;
private BoboIndexTracker boboIndexTracker;
public CompositeActivityManager(ActivityPersistenceFactory activityPersistenceFactory) {
this.activityPersistenceFactory = activityPersistenceFactory;
}
<<<<<<<
boboIndexTracker = new BoboIndexTracker();
boboIndexTracker.setSenseiCore(senseiCore);
Set<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>> zoieSystems = new HashSet<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>>();
=======
Set<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>> zoieSystems = new HashSet<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>>();
>>>>>>>
boboIndexTracker = new BoboIndexTracker();
boboIndexTracker.setSenseiCore(senseiCore);
Set<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>> zoieSystems = new HashSet<IndexReaderFactory<ZoieIndexReader<BoboIndexReader>>>();
<<<<<<<
senseiCore.getDecorator().addBoboListener(boboIndexTracker);
int purgeJobFrequencyInMinutes = activityPersistenceFactory.getActivityConfig().getPurgeJobFrequencyInMinutes();
purgeUnusedActivitiesJob = new PurgeUnusedActivitiesJob(activityValues, senseiCore, purgeJobFrequencyInMinutes * 60 * 1000);
=======
int purgeJobFrequencyInMinutes = activityPersistenceFactory.getActivityConfig().getPurgeJobFrequencyInMinutes();
purgeUnusedActivitiesJob = new PurgeUnusedActivitiesJob(activityValues, zoieSystems, purgeJobFrequencyInMinutes * 60 * 1000);
>>>>>>>
SenseiIndexReaderDecorator decorator = senseiCore.getDecorator();
decorator.addBoboListener(boboIndexTracker);
int purgeJobFrequencyInMinutes = activityPersistenceFactory.getActivityConfig().getPurgeJobFrequencyInMinutes();
purgeUnusedActivitiesJob = new PurgeUnusedActivitiesJob(activityValues, senseiCore, purgeJobFrequencyInMinutes * 60 * 1000);
<<<<<<<
ret.add(ActivityRangeFacetHandler.valueOf(facet.name, facet.column, this, getActivityValues().getActivityValues(facet.column)));
=======
ret.add(ActivityRangeFacetHandler.valueOf(facet.name, facet.column, getActivityValues(), getActivityValues().getActivityValues(facet.column)));
>>>>>>>
ret.add(ActivityRangeFacetHandler.valueOf(facet.name, facet.column, this, getActivityValues().getActivityValues(facet.column))); |
<<<<<<<
Engine(final RecordReader recordReader,
final List<RecordFilter> filterChain,
final RecordMapper recordMapper,
=======
private EventManager eventManager;
Engine(final RecordReader recordReader, final RecordFilter recordFilter, final RecordMapper recordMapper,
>>>>>>>
private EventManager eventManager;
Engine(final RecordReader recordReader,
final List<RecordFilter> filterChain,
final RecordMapper recordMapper,
<<<<<<<
//apply filter chain on the record, stop on first applied filter
boolean filtered = false;
for (RecordFilter recordFilter : filterChain) {
if (recordFilter.filterRecord(currentRecord)) {
filtered = true;
break;
}
}
if (filtered) {
=======
//filter record if any
boolean filterRecord = filterRecord(currentRecord);
if (filterRecord) {
>>>>>>>
//apply filter chain on the record
boolean filtered = filterRecord(currentRecord);
if (filtered) { |
<<<<<<<
import proj.zoie.api.ZoieVersion;
=======
import proj.zoie.api.Zoie;
>>>>>>>
import proj.zoie.api.ZoieVersion;
import proj.zoie.api.Zoie;
<<<<<<<
public SenseiIndexLoader getIndexLoader(int partition, ZoieSystem<?, T,V> zoie) {
final MemoryStreamDataProvider<T,V> memoryDataProvider = _memoryDataProviderMap
=======
public SenseiIndexLoader getIndexLoader(int partition, Zoie<BoboIndexReader, V> zoie) {
final MemoryStreamDataProvider<V> memoryDataProvider = _memoryDataProviderMap
>>>>>>>
public SenseiIndexLoader getIndexLoader(int partition, Zoie<BoboIndexReader, T, V> zoie) {
final MemoryStreamDataProvider<T,V> memoryDataProvider = _memoryDataProviderMap |
<<<<<<<
private final Integer repairCommandId; // received when triggering repair in Cassandra
private final long repairUnitId;
=======
>>>>>>>
<<<<<<<
public Integer getRepairCommandId() {
return repairCommandId;
}
public long getRepairUnitId() {
return repairUnitId;
}
=======
>>>>>>>
<<<<<<<
this.repairCommandId = builder.repairCommandId;
this.repairUnitId = builder.repairUnitId;
=======
>>>>>>>
<<<<<<<
private final long repairUnitId;
private State state;
=======
>>>>>>>
<<<<<<<
this.repairUnitId = repairUnitId;
this.state = State.NOT_STARTED;
=======
>>>>>>>
<<<<<<<
repairUnitId = original.repairUnitId;
=======
state = original.state;
coordinatorHost = original.coordinatorHost;
>>>>>>>
state = original.state;
coordinatorHost = original.coordinatorHost; |
<<<<<<<
=======
>>>>>>>
<<<<<<<
if (isStarting(oldState, newState)) {
return startRun(repairRun, table);
}
if (isPausing(oldState, newState)) {
return pauseRun(repairRun, table);
}
if (isResuming(oldState, newState)) {
return resumeRun(repairRun, table);
}
String errMsg = String.format("Transition %s->%s not supported.", oldState.toString(),
newState.toString());
=======
Optional<RepairUnit> repairUnit = storage.getRepairUnit(repairRun.get().getRepairUnitId());
if (!repairUnit.isPresent()) {
String errMsg = "repair unit with id " + repairRun.get().getRepairUnitId() + " not found";
>>>>>>>
Optional<RepairUnit> repairUnit = storage.getRepairUnit(repairRun.get().getRepairUnitId());
if (!repairUnit.isPresent()) {
String errMsg = "repair unit with id " + repairRun.get().getRepairUnitId() + " not found";
<<<<<<<
.runState(RepairRun.RunState.RUNNING)
.startTime(DateTime.now())
.build(repairRun.getId());
storage.updateRepairRun(updatedRun);
RepairRunner.startRepairRun(storage, repairRun.getId(), jmxFactory);
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, table))
.build();
=======
.runState(RepairRun.RunState.RUNNING)
.startTime(DateTime.now())
.build(repairRun.getId());
if (!storage.updateRepairRun(updatedRun)) {
throw new RuntimeException("failed updating repair run " + updatedRun.getId());
}
RepairRunner.startRepairRun(storage, repairRun.getId(), jmxFactory);
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, repairUnit))
.build();
>>>>>>>
.runState(RepairRun.RunState.RUNNING)
.startTime(DateTime.now())
.build(repairRun.getId());
if (!storage.updateRepairRun(updatedRun)) {
throw new RuntimeException("failed updating repair run " + updatedRun.getId());
}
RepairRunner.startRepairRun(storage, repairRun.getId(), jmxFactory);
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, repairUnit))
.build(); |
<<<<<<<
final long TIME_END = 43l;
final String TEST_CLUSTER = "TestCluster";
IStorage storage = new MemoryStorage();
// place a dummy cluster into storage
storage.addCluster(new Cluster(TEST_CLUSTER, null, Collections.<String>singleton(null)));
=======
>>>>>>>
final String TEST_CLUSTER = "TestCluster";
IStorage storage = new MemoryStorage();
// place a dummy cluster into storage
storage.addCluster(new Cluster(TEST_CLUSTER, null, Collections.<String>singleton(null)));
<<<<<<<
// end the repair
DateTimeUtils.setCurrentMillisFixed(TIME_END);
RepairRun run = storage.getRepairRun(RUN_ID);
storage.updateRepairRun(run.with().runState(RepairRun.RunState.RUNNING).build(RUN_ID));
RepairRunner.startNewRepairRun(storage, RUN_ID, new JmxConnectionFactory() {
@Override
public JmxProxy create(Optional<RepairStatusHandler> handler, String host)
throws ReaperException {
return null;
}
});
Thread.sleep(200);
}
@Test
public void testHangingRepair() throws ReaperException, InterruptedException {
final String CLUSTER_NAME = "reaper";
final String KS_NAME = "reaper";
final String CF_NAME = "reaper";
final long TIME_RUN = 41l;
final long TIME_RERUN = 42l;
final double INTENSITY = 0.5f;
IStorage storage = new MemoryStorage();
storage.addCluster(new Cluster(CLUSTER_NAME, null, Collections.<String>singleton(null)));
ColumnFamily cf =
storage.addColumnFamily(new ColumnFamily.Builder(CLUSTER_NAME, KS_NAME, CF_NAME, 1, false));
DateTimeUtils.setCurrentMillisFixed(TIME_RUN);
RepairRun repairRun = storage.addRepairRun(
new RepairRun.Builder(CLUSTER_NAME, cf.getId(), RepairRun.RunState.NOT_STARTED,
DateTime.now(), INTENSITY));
storage.addRepairSegments(Collections.singleton(
new RepairSegment.Builder(repairRun.getId(), new RingRange(BigInteger.ZERO, BigInteger.ONE),
RepairSegment.State.NOT_STARTED)));
final JmxProxy jmx = mock(JmxProxy.class);
when(jmx.getClusterName()).thenReturn(CLUSTER_NAME);
when(jmx.isConnectionAlive()).thenReturn(true);
when(jmx.tokenRangeToEndpoint(anyString(), any(RingRange.class)))
.thenReturn(Lists.newArrayList(""));
final AtomicInteger repairAttempts = new AtomicInteger(0);
when(jmx.triggerRepair(any(BigInteger.class), any(BigInteger.class), anyString(), anyString()))
.then(new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock invocation) throws Throwable {
return repairAttempts.incrementAndGet();
}
});
RepairRunner.initializeThreadPool(1, 1);
final RepairRunner repairRunner = new RepairRunner(storage, 1, new JmxConnectionFactory() {
@Override
public JmxProxy create(Optional<RepairStatusHandler> handler, String host)
throws ReaperException {
return jmx;
}
});
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.NOT_STARTED);
assertEquals(0, repairAttempts.get());
repairRunner.run();
assertEquals(1, repairAttempts.get());
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.RUNNING);
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.STARTED,
"Repair " + repairAttempts + " started");
assertEquals(DateTime.now(), storage.getRepairSegment(1).getStartTime());
assertEquals(RepairRun.RunState.RUNNING, storage.getRepairRun(1).getRunState());
Thread.sleep(1500);
assertEquals(2, repairAttempts.get());
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.RUNNING);
DateTimeUtils.setCurrentMillisFixed(TIME_RERUN);
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.STARTED,
"Repair " + repairAttempts + " started");
assertEquals(DateTime.now(), storage.getRepairSegment(1).getStartTime());
assertEquals(RepairRun.RunState.RUNNING, storage.getRepairRun(1).getRunState());
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.FINISHED,
"Repair " + repairAttempts + " finished");
Thread.sleep(100);
assertEquals(RepairRun.RunState.DONE, storage.getRepairRun(1).getRunState());
=======
// end time will also be set immediately
DateTime endTime = storage.getRepairRun(RUN_ID).getEndTime();
assertNotNull(endTime);
assertEquals(TIME_START, endTime.getMillis());
>>>>>>>
// end time will also be set immediately
DateTime endTime = storage.getRepairRun(RUN_ID).getEndTime();
assertNotNull(endTime);
assertEquals(TIME_START, endTime.getMillis());
}
@Test
public void testHangingRepair() throws ReaperException, InterruptedException {
final String CLUSTER_NAME = "reaper";
final String KS_NAME = "reaper";
final String CF_NAME = "reaper";
final long TIME_RUN = 41l;
final long TIME_RERUN = 42l;
final double INTENSITY = 0.5f;
IStorage storage = new MemoryStorage();
storage.addCluster(new Cluster(CLUSTER_NAME, null, Collections.<String>singleton(null)));
ColumnFamily cf =
storage.addColumnFamily(new ColumnFamily.Builder(CLUSTER_NAME, KS_NAME, CF_NAME, 1, false));
DateTimeUtils.setCurrentMillisFixed(TIME_RUN);
RepairRun repairRun = storage.addRepairRun(
new RepairRun.Builder(CLUSTER_NAME, cf.getId(), RepairRun.RunState.NOT_STARTED,
DateTime.now(), INTENSITY));
storage.addRepairSegments(Collections.singleton(
new RepairSegment.Builder(repairRun.getId(), new RingRange(BigInteger.ZERO, BigInteger.ONE),
RepairSegment.State.NOT_STARTED)), 1);
final JmxProxy jmx = mock(JmxProxy.class);
when(jmx.getClusterName()).thenReturn(CLUSTER_NAME);
when(jmx.isConnectionAlive()).thenReturn(true);
when(jmx.tokenRangeToEndpoint(anyString(), any(RingRange.class)))
.thenReturn(Lists.newArrayList(""));
final AtomicInteger repairAttempts = new AtomicInteger(0);
when(jmx.triggerRepair(any(BigInteger.class), any(BigInteger.class), anyString(), anyString()))
.then(new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock invocation) throws Throwable {
return repairAttempts.incrementAndGet();
}
});
RepairRunner.initializeThreadPool(1, 1);
final RepairRunner repairRunner = new RepairRunner(storage, 1, new JmxConnectionFactory() {
@Override
public JmxProxy create(Optional<RepairStatusHandler> handler, String host)
throws ReaperException {
return jmx;
}
});
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.NOT_STARTED);
assertEquals(0, repairAttempts.get());
repairRunner.run();
assertEquals(1, repairAttempts.get());
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.RUNNING);
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.STARTED,
"Repair " + repairAttempts + " started");
assertEquals(DateTime.now(), storage.getRepairSegment(1).getStartTime());
assertEquals(RepairRun.RunState.RUNNING, storage.getRepairRun(1).getRunState());
Thread.sleep(1500);
assertEquals(2, repairAttempts.get());
assertEquals(storage.getRepairSegment(1).getState(), RepairSegment.State.RUNNING);
DateTimeUtils.setCurrentMillisFixed(TIME_RERUN);
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.STARTED,
"Repair " + repairAttempts + " started");
assertEquals(DateTime.now(), storage.getRepairSegment(1).getStartTime());
assertEquals(RepairRun.RunState.RUNNING, storage.getRepairRun(1).getRunState());
repairRunner.handle(repairAttempts.get(), ActiveRepairService.Status.FINISHED,
"Repair " + repairAttempts + " finished");
Thread.sleep(100);
assertEquals(RepairRun.RunState.DONE, storage.getRepairRun(1).getRunState()); |
<<<<<<<
import com.google.common.collect.Sets;
=======
import com.google.common.collect.Sets;
>>>>>>>
import com.google.common.collect.Sets;
<<<<<<<
=======
import com.spotify.reaper.core.RepairUnit;
>>>>>>>
import com.spotify.reaper.core.RepairUnit;
<<<<<<<
=======
import com.spotify.reaper.cassandra.JmxConnectionFactory;
>>>>>>>
import com.spotify.reaper.cassandra.JmxConnectionFactory;
<<<<<<<
=======
>>>>>>>
<<<<<<<
* @return repair run ID in case of everything going well,
* and a status code 500 in case of errors.
=======
* Notice that query parameter "tables" can be a single String, or a comma-separated list
* of table names. If the "tables" parameter is omitted, and only the keyspace is defined,
* then created repair run will target all the tables in the keyspace.
*
* @return repair run ID in case of everything going well,
* and a status code 500 in case of errors.
>>>>>>>
* @return repair run ID in case of everything going well,
* and a status code 500 in case of errors.
<<<<<<<
Optional<Cluster> cluster = storage.getCluster(clusterName.get());
if (!cluster.isPresent()) {
return Response.status(Response.Status.NOT_FOUND).entity(
"no cluster found with name '" + clusterName + "'").build();
}
JmxProxy jmxProxy = jmxFactory.create(cluster.get().getSeedHosts().iterator().next());
Set<String> knownTables = jmxProxy.getTableNamesForKeyspace(keyspace.get());
if (knownTables.size() == 0) {
LOG.debug("no known tables for keyspace {} in cluster {}", keyspace.get(),
clusterName.get());
return Response.status(Response.Status.NOT_FOUND).entity(
"no column families found for keyspace").build();
}
jmxProxy.close();
Set<String> tableNames;
if (tableNamesParam.isPresent()) {
tableNames = Sets.newHashSet(COMMA_SEPARATED_LIST_SPLITTER.split(tableNamesParam.get()));
} else {
tableNames = knownTables;
}
Optional<RepairUnit> storedRepairUnit =
storage.getRepairUnit(clusterName.get(), keyspace.get(), tableNames);
RepairUnit theRepairUnit;
if (storedRepairUnit.isPresent()) {
if (segmentCount.isPresent()) {
LOG.warn("stored repair unit already exists, and segment count given, "
+ "which is thus ignored");
}
theRepairUnit = storedRepairUnit.get();
} else {
int segments = config.getSegmentCount();
if (segmentCount.isPresent()) {
LOG.debug("using given segment count {} instead of configured value {}",
segmentCount.get(), config.getSegmentCount());
segments = segmentCount.get();
}
LOG.info("create new repair unit for cluster '{}', keyspace '{}', and column families: {}",
clusterName.get(), keyspace.get(), tableNames);
theRepairUnit = storage.addRepairUnit(new RepairUnit.Builder(clusterName.get(),
keyspace.get(), tableNames, segments, config.getSnapshotRepair()));
}
RepairRun newRepairRun = registerRepairRun(cluster.get(), theRepairUnit, cause, owner.get());
=======
Cluster cluster = getCluster(clusterName.get());
JmxProxy jmxProxy = jmxFactory.create(cluster.getSeedHosts().iterator().next());
Set<String> knownTables = jmxProxy.getTableNamesForKeyspace(keyspace.get());
if (knownTables.size() == 0) {
LOG.debug("no known tables for keyspace {} in cluster {}", keyspace.get(),
clusterName.get());
return Response.status(Response.Status.NOT_FOUND).entity(
"no column families found for keyspace").build();
}
jmxProxy.close();
Set<String> tableNames;
if (tableNamesParam.isPresent()) {
tableNames = Sets.newHashSet(COMMA_SEPARATED_LIST_SPLITTER.split(tableNamesParam.get()));
} else {
tableNames = knownTables;
}
Optional<RepairUnit> storedRepairUnit =
storage.getRepairUnit(clusterName.get(), keyspace.get(), tableNames);
RepairUnit theRepairUnit;
if (storedRepairUnit.isPresent()) {
if (segmentCount.isPresent()) {
LOG.warn("stored repair unit already exists, and segment count given, "
+ "which is thus ignored");
}
theRepairUnit = storedRepairUnit.get();
} else {
int segments = config.getSegmentCount();
if (segmentCount.isPresent()) {
LOG.debug("using given segment count {} instead of configured value {}",
segmentCount.get(), config.getSegmentCount());
segments = segmentCount.get();
}
LOG.info("create new repair unit for cluster '{}', keyspace '{}', and column families: {}",
clusterName.get(), keyspace.get(), tableNames);
theRepairUnit = storage.addRepairUnit(new RepairUnit.Builder(clusterName.get(),
keyspace.get(), tableNames, segments, config.getSnapshotRepair()));
}
RepairRun newRepairRun = registerRepairRun(cluster, theRepairUnit, cause, owner.get());
>>>>>>>
Optional<Cluster> cluster = storage.getCluster(clusterName.get());
if (!cluster.isPresent()) {
return Response.status(Response.Status.NOT_FOUND).entity(
"no cluster found with name '" + clusterName + "'").build();
}
JmxProxy jmxProxy = jmxFactory.create(cluster.get().getSeedHosts().iterator().next());
Set<String> knownTables = jmxProxy.getTableNamesForKeyspace(keyspace.get());
if (knownTables.size() == 0) {
LOG.debug("no known tables for keyspace {} in cluster {}", keyspace.get(),
clusterName.get());
return Response.status(Response.Status.NOT_FOUND).entity(
"no column families found for keyspace").build();
}
jmxProxy.close();
Set<String> tableNames;
if (tableNamesParam.isPresent()) {
tableNames = Sets.newHashSet(COMMA_SEPARATED_LIST_SPLITTER.split(tableNamesParam.get()));
} else {
tableNames = knownTables;
}
Optional<RepairUnit> storedRepairUnit =
storage.getRepairUnit(clusterName.get(), keyspace.get(), tableNames);
RepairUnit theRepairUnit;
if (storedRepairUnit.isPresent()) {
if (segmentCount.isPresent()) {
LOG.warn("stored repair unit already exists, and segment count given, "
+ "which is thus ignored");
}
theRepairUnit = storedRepairUnit.get();
} else {
int segments = config.getSegmentCount();
if (segmentCount.isPresent()) {
LOG.debug("using given segment count {} instead of configured value {}",
segmentCount.get(), config.getSegmentCount());
segments = segmentCount.get();
}
LOG.info("create new repair unit for cluster '{}', keyspace '{}', and column families: {}",
clusterName.get(), keyspace.get(), tableNames);
theRepairUnit = storage.addRepairUnit(new RepairUnit.Builder(clusterName.get(),
keyspace.get(), tableNames, segments, config.getSnapshotRepair()));
}
RepairRun newRepairRun = registerRepairRun(cluster.get(), theRepairUnit, cause, owner.get());
<<<<<<<
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, repairUnit))
.build();
=======
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, repairUnit))
.build();
>>>>>>>
return Response.status(Response.Status.OK).entity(new RepairRunStatus(repairRun, repairUnit))
.build();
<<<<<<<
Optional<RepairRun> repairRun = storage.getRepairRun(repairRunId);
if (repairRun.isPresent()) {
return Response.ok().entity(getRepairRunStatus(repairRun.get())).build();
} else {
return Response.status(404).entity(
"repair run with id " + repairRunId + " doesn't exist").build();
=======
Optional<RepairRun> repairRun = storage.getRepairRun(repairRunId);
if (repairRun.isPresent()) {
return Response.ok().entity(getRepairRunStatus(repairRun.get())).build();
}
else {
return Response.status(404).entity(
"repair run with id " + repairRunId + " doesn't exist").build();
>>>>>>>
Optional<RepairRun> repairRun = storage.getRepairRun(repairRunId);
if (repairRun.isPresent()) {
return Response.ok().entity(getRepairRunStatus(repairRun.get())).build();
} else {
return Response.status(404).entity(
"repair run with id " + repairRunId + " doesn't exist").build();
<<<<<<<
=======
* @return cluster information for the given cluster name
* @throws ReaperException if cluster with given name is not found
*/
private Cluster getCluster(String clusterName) throws ReaperException {
Cluster cluster = storage.getCluster(clusterName);
if (cluster == null) {
throw new ReaperException(String.format("Cluster \"%s\" not found", clusterName));
}
return cluster;
}
/**
>>>>>>>
<<<<<<<
RepairUnit repairUnit) throws ReaperException {
List<RepairSegment.Builder> repairSegmentBuilders = Lists.newArrayList();
=======
RepairUnit table) throws ReaperException {
List <RepairSegment.Builder> repairSegmentBuilders = Lists.newArrayList();
>>>>>>>
RepairUnit repairUnit) throws ReaperException {
List<RepairSegment.Builder> repairSegmentBuilders = Lists.newArrayList();
<<<<<<<
boolean success = storage.addRepairSegments(repairSegmentBuilders, repairRun.getId());
if (!success) {
throw new ReaperException("failed adding repair segments to storage");
}
if (repairUnit.getSegmentCount() != tokenSegments.size()) {
LOG.debug("created segment amount differs from expected default {} != {}",
repairUnit.getSegmentCount(), tokenSegments.size());
// TODO: update the RepairUnit with new segment count
}
=======
boolean success = storage.addRepairSegments(repairSegmentBuilders, repairRun.getId());
if (!success) {
throw new ReaperException("failed adding repair segments to storage");
}
>>>>>>>
boolean success = storage.addRepairSegments(repairSegmentBuilders, repairRun.getId());
if (!success) {
throw new ReaperException("failed adding repair segments to storage");
}
if (repairUnit.getSegmentCount() != tokenSegments.size()) {
LOG.debug("created segment amount differs from expected default {} != {}",
repairUnit.getSegmentCount(), tokenSegments.size());
// TODO: update the RepairUnit with new segment count
}
<<<<<<<
Optional<RepairUnit> repairUnit = storage.getRepairUnit(repairRun.getRepairUnitId());
assert repairUnit.isPresent() : "no repair unit found with id: " + repairRun.getRepairUnitId();
RepairRunStatus repairRunStatus = new RepairRunStatus(repairRun, repairUnit.get());
=======
RepairUnit repairUnit = storage.getColumnFamily(repairRun.getRepairUnitId());
RepairRunStatus repairRunStatus = new RepairRunStatus(repairRun, repairUnit);
>>>>>>>
Optional<RepairUnit> repairUnit = storage.getRepairUnit(repairRun.getRepairUnitId());
assert repairUnit.isPresent() : "no repair unit found with id: " + repairRun.getRepairUnitId();
RepairRunStatus repairRunStatus = new RepairRunStatus(repairRun, repairUnit.get()); |
<<<<<<<
import android.app.AlarmManager;
=======
>>>>>>>
import android.app.AlarmManager;
<<<<<<<
import android.os.Build;
=======
>>>>>>>
import android.os.Build; |
<<<<<<<
=======
import com.jwoolston.android.uvc.interfaces.VideoControlInterface;
import com.jwoolston.android.uvc.interfaces.VideoStreamingInterface;
import com.jwoolston.android.uvc.requests.PowerModeControl;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
>>>>>>>
import com.jwoolston.android.uvc.interfaces.VideoControlInterface;
import com.jwoolston.android.uvc.interfaces.VideoStreamingInterface;
import com.jwoolston.android.uvc.requests.PowerModeControl;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
<<<<<<<
WebcamConnection(@NonNull Context context, @NonNull android.hardware.usb.UsbDevice usbDevice)
throws UnknownDeviceException, DevicePermissionDenied {
this.usbManager = new UsbManager(context);
=======
private List<InterfaceAssociationDescriptor> iads;
WebcamConnection(Context context, UsbManager usbManager, UsbDevice usbDevice) throws UnknownDeviceException {
this.usbManager = usbManager;
this.usbDevice = usbDevice;
>>>>>>>
private List<InterfaceAssociationDescriptor> iads;
WebcamConnection(@NonNull Context context, @NonNull android.hardware.usb.UsbDevice usbDevice)
throws UnknownDeviceException, DevicePermissionDenied {
this.usbManager = new UsbManager(context);
<<<<<<<
Timber.d("Initializing native layer.");
usbDeviceConnection = usbManager.registerDevice(usbDevice);
=======
usbDeviceConnection = usbManager.openDevice(usbDevice);
usbDeviceConnection.claimInterface(usbInterfaceControl, true);
//TODO: Throw exception if unable to claim interface
>>>>>>>
Timber.d("Initializing native layer.");
usbDeviceConnection = usbManager.registerDevice(usbDevice);
<<<<<<<
=======
Timber.d("Initializing native layer.");
Timber.d("Attempting to select zero bandwidth stream interface.");
//iads.get(0).getInterface(1).selectAlternateSetting(usbDeviceConnection, 0);
VideoStreamingInterface streamingInterface = (VideoStreamingInterface) iads.get(0).getInterface(1);
streamingInterface.selectAlternateSetting(usbDeviceConnection, 0);
//util.selectAlternateSetting(streamingInterface.getInterfaceNumber(), 0);
//clearStall(usbInterfaceControl.getEndpoint(0));
Timber.d("Attempting to set current power mode.");
final PowerModeControl control = PowerModeControl.getInfoPowerMode(
(VideoControlInterface) iads.get(0).getInterface(0));
Timber.v("Request: " + control);
/*int retval = usbDeviceConnection.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 500);*/
/*int retval = util.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 1000);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
retval = util.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 1000);*/
/*Log.v(TAG, "Control transfer length: " + retval);
clearStall(usbInterfaceControl.getEndpoint(0));
retval = usbDeviceConnection.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 500);
Log.v(TAG, "Control transfer length: " + retval);*/
>>>>>>>
Timber.d("Attempting to select zero bandwidth stream interface.");
//iads.get(0).getInterface(1).selectAlternateSetting(usbDeviceConnection, 0);
VideoStreamingInterface streamingInterface = (VideoStreamingInterface) iads.get(0).getInterface(1);
streamingInterface.selectAlternateSetting(usbDeviceConnection, 0);
//util.selectAlternateSetting(streamingInterface.getInterfaceNumber(), 0);
//clearStall(usbInterfaceControl.getEndpoint(0));
Timber.d("Attempting to set current power mode.");
final PowerModeControl control = PowerModeControl.getInfoPowerMode(
(VideoControlInterface) iads.get(0).getInterface(0));
Timber.v("Request: " + control);
/*int retval = usbDeviceConnection.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 500);*/
/*int retval = util.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 1000);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
retval = util.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 1000);*/
/*Log.v(TAG, "Control transfer length: " + retval);
clearStall(usbInterfaceControl.getEndpoint(0));
retval = usbDeviceConnection.controlTransfer(control.getRequestType(), control.getRequest(), control
.getValue(), control.getIndex(), control.getData(), control.getLength(), 500);
Log.v(TAG, "Control transfer length: " + retval);*/ |
<<<<<<<
import com.jwoolston.android.libusb.UsbDeviceConnection;
import com.jwoolston.android.libusb.UsbInterface;
=======
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbInterface;
import android.support.annotation.NonNull;
>>>>>>>
import android.support.annotation.NonNull;
import com.jwoolston.android.libusb.UsbDeviceConnection;
import com.jwoolston.android.libusb.UsbInterface;
<<<<<<<
=======
import java.util.LinkedList;
import java.util.List;
>>>>>>>
import java.util.LinkedList;
import java.util.List;
<<<<<<<
public static VideoControlInterface parseVideoControlInterface(UsbDeviceConnection connection, byte[] descriptor)
throws IllegalArgumentException {
=======
private List<VideoInputTerminal> inputTerminals = new LinkedList<>();
private List<VideoOutputTerminal> outputTerminals = new LinkedList<>();
private List<VideoUnit> units = new LinkedList<>();
public static VideoControlInterface parseVideoControlInterface(UsbDevice device, byte[] descriptor) throws IllegalArgumentException {
>>>>>>>
private List<VideoInputTerminal> inputTerminals = new LinkedList<>();
private List<VideoOutputTerminal> outputTerminals = new LinkedList<>();
private List<VideoUnit> units = new LinkedList<>();
public static VideoControlInterface parseVideoControlInterface(UsbDeviceConnection connection, byte[] descriptor)
throws IllegalArgumentException {
<<<<<<<
final UsbInterface usbInterface = UvcInterface.getUsbInterface(connection, descriptor);
=======
final UsbInterface usbInterface = UvcInterface.getUsbInterface(device, descriptor);
>>>>>>>
final UsbInterface usbInterface = UvcInterface.getUsbInterface(connection, descriptor);
<<<<<<<
Timber.d("Parsing Video Class Interface header.");
if (descriptor.length < VIDEO_CLASS_HEADER_LENGTH) {
throw new IllegalArgumentException("The provided descriptor is not a valid Video Class Interface.");
}
=======
if (descriptor.length < VIDEO_CLASS_HEADER_LENGTH) throw new IllegalArgumentException("The provided descriptor is not a valid Video Class Interface.");
>>>>>>>
Timber.d("Parsing Video Class Interface header.");
if (descriptor.length < VIDEO_CLASS_HEADER_LENGTH) {
throw new IllegalArgumentException("The provided descriptor is not a valid Video Class Interface.");
} |
<<<<<<<
import android.util.Log;
import com.jwoolston.android.libusb.UsbDevice;
import com.jwoolston.android.libusb.UsbInterface;
=======
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbInterface;
>>>>>>>
import com.jwoolston.android.libusb.UsbDevice;
import com.jwoolston.android.libusb.UsbInterface; |
<<<<<<<
import android.support.annotation.NonNull;
import android.util.Log;
=======
>>>>>>>
<<<<<<<
=======
Timber.d("Parsing Video Class Interface header.");
>>>>>>>
<<<<<<<
=======
Timber.d("Parsing Video Class Interface Terminal.");
>>>>>>>
<<<<<<<
inputTerminals.add(cameraTerminal);
=======
Timber.d("%s", cameraTerminal);
>>>>>>>
inputTerminals.add(cameraTerminal);
<<<<<<<
inputTerminals.add(inputTerminal);
=======
Timber.d("%s", inputTerminal);
>>>>>>>
inputTerminals.add(inputTerminal);
<<<<<<<
outputTerminals.add(outputTerminal);
=======
Timber.d("%s", outputTerminal);
>>>>>>>
outputTerminals.add(outputTerminal);
<<<<<<<
=======
Timber.d("Parsing Video Class Interface Unit.");
>>>>>>>
<<<<<<<
units.add(selectorUnit);
=======
Timber.d("%s", selectorUnit);
>>>>>>>
units.add(selectorUnit);
<<<<<<<
units.add(processingUnit);
=======
Timber.d("%s", processingUnit);
>>>>>>>
units.add(processingUnit);
<<<<<<<
units.add(encodingUnit);
=======
Timber.d("%s", encodingUnit);
>>>>>>>
units.add(encodingUnit); |
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("name", null, null, null, PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("name", null, null, null, TIMESTAMP_PATTERN); |
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", "yyyy-MM-dd");
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", SHORT_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", SHORT_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN);
<<<<<<<
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", null);
=======
DateRangeMapper mapper = new DateRangeMapper("field", "from", "to", TIMESTAMP_PATTERN);
>>>>>>>
DateRangeMapper mapper = new DateRangeMapper("name", "from", "to", TIMESTAMP_PATTERN); |
<<<<<<<
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
=======
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import java.util.List;
>>>>>>>
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
<<<<<<<
import java.util.List;
import static junit.framework.Assert.*;
=======
>>>>>>>
import java.util.List;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
<<<<<<<
private static final Murmur3Partitioner partitioner = new Murmur3Partitioner();
private static final TokenMapperMurmur mapper = new TokenMapperMurmur();
@BeforeClass
public static void beforeClass() {
Config.setClientMode(true);
DatabaseDescriptor.setPartitioner(partitioner);
}
@Test
public void testInstance() {
assertEquals("Expected Murmur mapper", TokenMapperMurmur.class, TokenMapper.instance().getClass());
}
=======
>>>>>>>
private static final Murmur3Partitioner partitioner = new Murmur3Partitioner();
private static final TokenMapperMurmur mapper = new TokenMapperMurmur();
@BeforeClass
public static void beforeClass() {
Config.setClientMode(true);
DatabaseDescriptor.setPartitioner(partitioner);
}
@Test
public void testInstance() {
assertEquals("Expected Murmur mapper", TokenMapperMurmur.class, TokenMapper.instance().getClass());
}
<<<<<<<
private static DecoratedKey decoratedKey(String value) {
return partitioner.decorateKey(UTF8Type.instance.decompose(value));
}
private static Token token(String value) {
return decoratedKey(value).getToken();
}
}
=======
}
>>>>>>>
private static DecoratedKey decoratedKey(String value) {
return partitioner.decorateKey(UTF8Type.instance.decompose(value));
}
private static Token token(String value) {
return decoratedKey(value).getToken();
}
} |
<<<<<<<
=======
import org.apache.cassandra.cli.CliParser;
>>>>>>>
<<<<<<<
=======
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.NumericRangeQuery;
>>>>>>>
import org.apache.lucene.search.NumericRangeQuery; |
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null, TIMESTAMP_PATTERN);
=======
DateMapper mapper = new DateMapper("field", null, null, null, PATTERN);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, TIMESTAMP_PATTERN);
<<<<<<<
DateMapper mapper = new DateMapper("field", null, null,null);
=======
DateMapper mapper = new DateMapper("field", null, null, null, null);
>>>>>>>
DateMapper mapper = new DateMapper("field", null, null, null, null); |
<<<<<<<
import org.smartdata.server.ServerContext;
=======
import org.smartdata.server.cmdlet.message.ActionStatusReport;
import org.smartdata.server.cmdlet.message.CmdletStatusUpdate;
>>>>>>>
import org.smartdata.server.ServerContext;
import org.smartdata.server.cmdlet.message.ActionStatusReport;
import org.smartdata.server.cmdlet.message.CmdletStatusUpdate; |
<<<<<<<
Integer mergeRequestIid, String targetBranch, String targetRepoName, String targetNamespace, String targetRepoSshUrl,
String targetRepoHttpUrl, String triggeredByUser, String before, String after, String lastCommit, String targetProjectUrl,
String triggerPhrase, String mergeRequestState, String mergedByUser, String mergeRequestAssignee, String ref, String isTag,
String sha, String beforeSha, String status, String stages, String createdAt, String finishedAt, String buildDuration) {
=======
Integer mergeRequestIid, Integer mergeRequestTargetProjectId, String targetBranch, String targetRepoName, String targetNamespace,
String targetRepoSshUrl, String targetRepoHttpUrl, String triggeredByUser, String before, String after, String lastCommit,
String targetProjectUrl, String triggerPhrase, String mergeRequestState, String mergedByUser, String mergeRequestAssignee) {
>>>>>>>
Integer mergeRequestIid, Integer mergeRequestTargetProjectId, String targetBranch, String targetRepoName, String targetNamespace, String targetRepoSshUrl,
String targetRepoHttpUrl, String triggeredByUser, String before, String after, String lastCommit, String targetProjectUrl,
String triggerPhrase, String mergeRequestState, String mergedByUser, String mergeRequestAssignee, String ref, String isTag,
String sha, String beforeSha, String status, String stages, String createdAt, String finishedAt, String buildDuration) { |
<<<<<<<
private boolean acceptMergeRequestOnSuccess = true;
=======
private boolean addVoteOnMergeRequest = true;
>>>>>>>
private boolean addVoteOnMergeRequest = true;
<<<<<<<
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerOnMergeRequest, boolean triggerOpenMergeRequestOnPush, boolean setBuildDescription, boolean acceptMergeRequestOnSuccess, boolean allowAllBranches, List<String> allowedBranches) {
=======
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerOnMergeRequest, String triggerOpenMergeRequestOnPush, boolean ciSkip, boolean setBuildDescription, boolean addNoteOnMergeRequest, boolean addVoteOnMergeRequest, boolean allowAllBranches,
String includeBranchesSpec, String excludeBranchesSpec) {
>>>>>>>
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerOnMergeRequest, String triggerOpenMergeRequestOnPush, boolean ciSkip, boolean setBuildDescription, boolean addNoteOnMergeRequest, boolean addVoteOnMergeRequest, boolean acceptMergeRequestOnSuccess, boolean allowAllBranches,
String includeBranchesSpec, String excludeBranchesSpec) {
<<<<<<<
this.allowedBranches = allowedBranches;
this.acceptMergeRequestOnSuccess = acceptMergeRequestOnSuccess;
=======
this.includeBranchesSpec = includeBranchesSpec;
this.excludeBranchesSpec = excludeBranchesSpec;
>>>>>>>
this.includeBranchesSpec = includeBranchesSpec;
this.excludeBranchesSpec = excludeBranchesSpec;
this.acceptMergeRequestOnSuccess = acceptMergeRequestOnSuccess;
<<<<<<<
private void onCompleteMergeRequest(AbstractBuild abstractBuild,GitLabMergeCause cause){
if(acceptMergeRequestOnSuccess && abstractBuild.getResult() == Result.SUCCESS) {
try {
GitlabProject proj = new GitlabProject();
proj.setId(cause.getMergeRequest().getObjectAttribute().getTargetProjectId());
this.getDescriptor().getGitlab().instance().acceptMergeRequest(
proj,
cause.getMergeRequest().getObjectAttribute().getId(),
"Merge Request accepted by jenkins build success");
} catch (IOException e) {
e.printStackTrace();
}
}
=======
private void onCompleteMergeRequest(Run run,GitLabMergeCause cause){
>>>>>>>
private void onCompleteMergeRequest(Run run,GitLabMergeCause cause){
if(acceptMergeRequestOnSuccess && run.getResult() == Result.SUCCESS) {
try {
GitlabProject proj = new GitlabProject();
proj.setId(cause.getMergeRequest().getObjectAttribute().getTargetProjectId());
this.getDescriptor().getGitlab().instance().acceptMergeRequest(
proj,
cause.getMergeRequest().getObjectAttribute().getId(),
"Merge Request accepted by jenkins build success");
} catch (IOException e) {
e.printStackTrace();
}
} |
<<<<<<<
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerToBranchDeleteRequest, boolean triggerOnMergeRequest, boolean triggerOnlyIfNewCommitsPushed, boolean triggerOnAcceptedMergeRequest, boolean triggerOnClosedMergeRequest,
TriggerOpenMergeRequest triggerOpenMergeRequestOnPush, boolean triggerOnNoteRequest, String noteRegex,
boolean skipWorkInProgressMergeRequest, boolean ciSkip,
=======
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerToBranchDeleteRequest, boolean triggerOnMergeRequest, boolean triggerOnAcceptedMergeRequest, boolean triggerOnClosedMergeRequest,
TriggerOpenMergeRequest triggerOpenMergeRequestOnPush, boolean triggerOnNoteRequest, String noteRegex,
boolean skipWorkInProgressMergeRequest, boolean ciSkip, String labelsThatForcesBuildIfAdded,
>>>>>>>
public GitLabPushTrigger(boolean triggerOnPush, boolean triggerToBranchDeleteRequest, boolean triggerOnMergeRequest, boolean triggerOnlyIfNewCommitsPushed, boolean triggerOnAcceptedMergeRequest, boolean triggerOnClosedMergeRequest,
TriggerOpenMergeRequest triggerOpenMergeRequestOnPush, boolean triggerOnNoteRequest, String noteRegex,
boolean skipWorkInProgressMergeRequest, boolean ciSkip, String labelsThatForcesBuildIfAdded, |
<<<<<<<
import constants.LanguageConstants;
=======
>>>>>>>
<<<<<<<
=======
public ScheduledFuture<?> pqMapleMap;
public ScheduledFuture<?> ariantScore;
>>>>>>>
<<<<<<<
=======
if (mItem.getItemId() == 4031868) {
updateAriantScore();
getMap().broadcastMessage(MaplePacketCreator.updateAriantPQRanking(this.getName(), this.getItemQuantity(4031868, false), false));
}
>>>>>>>
<<<<<<<
if (map.unclaimOwnership(this)) {
map.dropMessage(5, "This lawn is now free real estate.");
}
=======
cancelPqMapleMap();
cancelAriantScore();
}
public void cancelPqMapleMap() {
if (pqMapleMap != null) {
pqMapleMap.cancel(true);
pqMapleMap = null;
}
}
public void cancelAriantScore() {
if (ariantScore != null) {
ariantScore.cancel(true);
ariantScore = null;
}
>>>>>>>
if (map.unclaimOwnership(this)) {
map.dropMessage(5, "This lawn is now free real estate.");
}
AriantColiseum arena = this.getAriantColiseum();
if (arena != null) {
arena.leaveArena(this);
}
<<<<<<<
=======
public void obterItens() {
for (Item item : getItem()) {
getClient().getAbstractPlayerInteraction().gainItem(item.getItemId(), item.getQuantity());
}
}
public int getAriantScore() {
return this.countItem(4031868);
}
public void updateAriantScore() {
this.getMap().broadcastMessage(MaplePacketCreator.updateAriantPQRanking(this.getName(), getAriantScore(), false));
}
public void disease(int type, int level) {
if (MapleDisease.getBySkill(type) == null) {
return;
}
giveDebuff(MapleDisease.getBySkill(type), MobSkillFactory.getMobSkill(type, level));
}
public void shield() {
List<Pair<MapleBuffStat, Integer>> ldsstat = Collections.singletonList(new Pair<MapleBuffStat, Integer>(MapleBuffStat.ARIANT_PQ_SHIELD, 1));
getMap().broadcastMessage(this, MaplePacketCreator.giveForeignBuff(id, ldsstat), false);
}
public ScheduledFuture<?> getPqMapleMap() {
return pqMapleMap;
}
public void setPqMapleMap(ScheduledFuture<?> pqMapleMap) {
this.pqMapleMap = pqMapleMap;
}
public ScheduledFuture<?> getAriantScoreBord() {
return ariantScore;
}
public void setAriantScore(ScheduledFuture<?> ariantScore) {
this.ariantScore = ariantScore;
}
>>>>>>> |
<<<<<<<
private MapleCharacter mapOwner = null;
private long mapOwnerLastActivityTime = Long.MAX_VALUE;
=======
>>>>>>>
private MapleCharacter mapOwner = null;
private long mapOwnerLastActivityTime = Long.MAX_VALUE;
<<<<<<<
if (chr.getChalkboard() != null) {
if (!GameConstants.isFreeMarketRoom(mapid)) {
chr.announce(MaplePacketCreator.useChalkboard(chr, false)); // update player's chalkboard when changing maps found thanks to Vcoc
} else {
chr.setChalkboard(null);
}
}
=======
>>>>>>>
if (chr.getChalkboard() != null) {
if (!GameConstants.isFreeMarketRoom(mapid)) {
chr.announce(MaplePacketCreator.useChalkboard(chr, false)); // update player's chalkboard when changing maps found thanks to Vcoc
} else {
chr.setChalkboard(null);
}
}
<<<<<<<
public MaplePortal findClosestWarpPortal(Point from) {
MaplePortal closest = null;
double shortestDistance = Double.POSITIVE_INFINITY;
for (MaplePortal portal : portals.values()) {
double distance = portal.getPosition().distanceSq(from);
if (portal.getType() == MaplePortal.MAP_PORTAL && distance < shortestDistance && portal.getTargetMapId() == 999999999) {
closest = portal;
shortestDistance = distance;
}
}
return closest;
}
=======
>>>>>>>
public MaplePortal findClosestWarpPortal(Point from) {
MaplePortal closest = null;
double shortestDistance = Double.POSITIVE_INFINITY;
for (MaplePortal portal : portals.values()) {
double distance = portal.getPosition().distanceSq(from);
if (portal.getType() == MaplePortal.MAP_PORTAL && distance < shortestDistance && portal.getTargetMapId() == 999999999) {
closest = portal;
shortestDistance = distance;
}
}
return closest;
}
<<<<<<<
public boolean claimOwnership(MapleCharacter chr) {
if (mapOwner == null) {
mapOwner = chr;
mapOwnerLastActivityTime = Server.getInstance().getCurrentTime();
getChannelServer().registerOwnedMap(this);
return true;
} else {
return chr == mapOwner;
}
}
public boolean unclaimOwnership(MapleCharacter chr) {
if (mapOwner == chr) {
mapOwner = null;
mapOwnerLastActivityTime = Long.MAX_VALUE;
getChannelServer().unregisterOwnedMap(this);
return true;
} else {
return false;
}
}
private void refreshOwnership() {
mapOwnerLastActivityTime = Server.getInstance().getCurrentTime();
}
public boolean isOwnershipRestricted(MapleCharacter chr) {
MapleCharacter owner = mapOwner;
if (owner != null) {
if (owner != chr && !owner.isPartyMember(chr)) { // thanks Vcoc & BHB for suggesting the map ownership feature
chr.showMapOwnershipInfo(owner);
return true;
} else {
this.refreshOwnership();
}
}
return false;
}
public void checkMapOwnerActivity() {
long timeNow = Server.getInstance().getCurrentTime();
if (timeNow - mapOwnerLastActivityTime > 60000) {
if (unclaimOwnership(mapOwner)) {
this.dropMessage(5, "This lawn is now free real estate.");
}
}
}
=======
>>>>>>>
public boolean claimOwnership(MapleCharacter chr) {
if (mapOwner == null) {
mapOwner = chr;
mapOwnerLastActivityTime = Server.getInstance().getCurrentTime();
getChannelServer().registerOwnedMap(this);
return true;
} else {
return chr == mapOwner;
}
}
public boolean unclaimOwnership(MapleCharacter chr) {
if (mapOwner == chr) {
mapOwner = null;
mapOwnerLastActivityTime = Long.MAX_VALUE;
getChannelServer().unregisterOwnedMap(this);
return true;
} else {
return false;
}
}
private void refreshOwnership() {
mapOwnerLastActivityTime = Server.getInstance().getCurrentTime();
}
public boolean isOwnershipRestricted(MapleCharacter chr) {
MapleCharacter owner = mapOwner;
if (owner != null) {
if (owner != chr && !owner.isPartyMember(chr)) { // thanks Vcoc & BHB for suggesting the map ownership feature
chr.showMapOwnershipInfo(owner);
return true;
} else {
this.refreshOwnership();
}
}
return false;
}
public void checkMapOwnerActivity() {
long timeNow = Server.getInstance().getCurrentTime();
if (timeNow - mapOwnerLastActivityTime > 60000) {
if (unclaimOwnership(mapOwner)) {
this.dropMessage(5, "This lawn is now free real estate.");
}
}
}
private final List<Point> takenSpawns = new LinkedList<>();
private final List<GuardianSpawnPoint> guardianSpawns = new LinkedList<>();
private final List<MCSkill> blueTeamBuffs = new ArrayList();
private final List<MCSkill> redTeamBuffs = new ArrayList();
private List<Integer> skillIds = new ArrayList();
private List<Pair<Integer, Integer>> mobsToSpawn = new ArrayList();
public List<MCSkill> getBlueTeamBuffs() {
return blueTeamBuffs;
}
public List<MCSkill> getRedTeamBuffs() {
return redTeamBuffs;
}
public void clearBuffList() {
redTeamBuffs.clear();
blueTeamBuffs.clear();
}
public List<MapleMapObject> getAllPlayer() {
return getMapObjectsInRange(new Point(0, 0), Double.POSITIVE_INFINITY, Arrays.asList(MapleMapObjectType.PLAYER));
}
public boolean isCPQMap() {
switch (this.getId()) {
case 980000101:
case 980000201:
case 980000301:
case 980000401:
case 980000501:
case 980000601:
case 980031100:
case 980032100:
case 980033100:
return true;
}
return false;
}
public boolean isCPQMap2() {
switch (this.getId()) {
case 980031100:
case 980032100:
case 980033100:
return true;
}
return false;
}
public boolean isCPQLobby() {
switch (this.getId()) {
case 980000100:
case 980000200:
case 980000300:
case 980000400:
case 980000500:
case 980000600:
return true;
}
return false;
}
public boolean isBlueCPQMap() {
switch (this.getId()) {
case 980000501:
case 980000601:
case 980031200:
case 980032200:
case 980033200:
return true;
}
return false;
}
public boolean isPurpleCPQMap() {
switch (this.getId()) {
case 980000301:
case 980000401:
case 980031200:
case 980032200:
case 980033200:
return true;
}
return false;
}
public Point getRandomSP(int team) {
if (takenSpawns.size() > 0) {
for (SpawnPoint sp : monsterSpawn) {
for (Point pt : takenSpawns) {
if ((sp.getPosition().x == pt.x && sp.getPosition().y == pt.y) || (sp.getTeam() != team && !this.isBlueCPQMap())) {
continue;
} else {
takenSpawns.add(pt);
return sp.getPosition();
}
}
}
} else {
for (SpawnPoint sp : monsterSpawn) {
if (sp.getTeam() == team || this.isBlueCPQMap()) {
takenSpawns.add(sp.getPosition());
return sp.getPosition();
}
}
}
return null;
}
public GuardianSpawnPoint getRandomGuardianSpawn(int team) {
boolean alltaken = false;
for (GuardianSpawnPoint a : this.guardianSpawns) {
if (!a.isTaken()) {
alltaken = false;
break;
}
}
if (alltaken) {
return null;
}
if (this.guardianSpawns.size() > 0) {
while (true) {
for (GuardianSpawnPoint gsp : this.guardianSpawns) {
if (!gsp.isTaken() && Math.random() < 0.3 && (gsp.getTeam() == -1 || gsp.getTeam() == team)) {
return gsp;
}
}
}
}
return null;
}
public void addGuardianSpawnPoint(GuardianSpawnPoint a) {
this.guardianSpawns.add(a);
}
public int spawnGuardian(int team, int num) {
try {
if (team == 0 && redTeamBuffs.size() >= 4 || team == 1 && blueTeamBuffs.size() >= 4) {
return 2;
}
final MCSkill skil = MapleCarnivalFactory.getInstance().getGuardian(num);
if (team == 0 && redTeamBuffs.contains(skil)) {
return 0;
} else if (team == 1 && blueTeamBuffs.contains(skil)) {
return 0;
}
GuardianSpawnPoint pt = this.getRandomGuardianSpawn(team);
if (pt == null) {
return -1;
}
int reactorID = 9980000 + team;
MapleReactor reactor = new MapleReactor(MapleReactorFactory.getReactorS(reactorID), reactorID);
pt.setTaken(true);
reactor.setPosition(pt.getPosition());
reactor.setName(team + "" + num); //lol
reactor.resetReactorActions(0);
this.spawnReactor(reactor);
reactor.setGuardian(pt);
this.buffMonsters(team, skil);
getReactorByOid(reactor.getObjectId()).hitReactor(((MapleCharacter) this.getAllPlayer().get(0)).getClient());
} catch (Exception e) {
e.printStackTrace();
}
return 1;
}
public void buffMonsters(int team, MCSkill skil) {
if (team == 0) {
redTeamBuffs.add(skil);
} else if (team == 1) {
blueTeamBuffs.add(skil);
}
for (MapleMapObject mmo : this.mapobjects.values()) {
if (mmo.getType() == MapleMapObjectType.MONSTER) {
MapleMonster mob = (MapleMonster) mmo;
if (mob.getTeam() == team) {
if (skil != null) {
skil.getSkill().applyEffect(null, mob, false, null);
}
}
}
}
}
public final List<Integer> getSkillIds() {
return skillIds;
}
public final void addSkillId(int z) {
this.skillIds.add(z);
}
public final void addMobSpawn(int mobId, int spendCP) {
this.mobsToSpawn.add(new Pair<Integer, Integer>(mobId, spendCP));
}
public final List<Pair<Integer, Integer>> getMobsToSpawn() {
return mobsToSpawn;
}
public boolean isCPQWinnerMap() {
switch (this.getId()) {
case 980000103:
case 980000203:
case 980000303:
case 980000403:
case 980000503:
case 980000603:
case 980031300:
case 980032300:
case 980033300:
return true;
}
return false;
}
public boolean isCPQLoserMap() {
switch (this.getId()) {
case 980000104:
case 980000204:
case 980000304:
case 980000404:
case 980000504:
case 980000604:
case 980031400:
case 980032400:
case 980033400:
return true;
}
return false;
} |
<<<<<<<
public static boolean isMedal(int itemId) {
return itemId >= 1140000 && itemId < 1143000;
}
=======
public static boolean isWeddingRing(int itemId) {
return itemId >= 1112803 && itemId <= 1112809;
}
public static boolean isWeddingToken(int itemId) {
return itemId >= 4031357 && itemId <= 4031364;
}
>>>>>>>
public static boolean isMedal(int itemId) {
return itemId >= 1140000 && itemId < 1143000;
}
public static boolean isWeddingRing(int itemId) {
return itemId >= 1112803 && itemId <= 1112809;
}
public static boolean isWeddingToken(int itemId) {
return itemId >= 4031357 && itemId <= 4031364;
} |
<<<<<<<
// thanks xinyifly for finding out job advancements awarding APs
/*
if (newJob.getId() % 10 >= 1) {
=======
if (newJob.getId() % 10 > 1) {
>>>>>>>
// thanks xinyifly for finding out job advancements awarding APs
/*
if (newJob.getId() % 10 >= 1) {
<<<<<<<
*/
=======
>>>>>>>
*/
<<<<<<<
if(merchant == null) return;
if (closeMerchant) {
if (merchant.isOwner(this) && merchant.getItems().isEmpty()) {
merchant.forceClose();
} else {
merchant.removeVisitor(this);
this.setHiredMerchant(null);
}
=======
if (merchant == null) {
return;
}
if (closeMerchant) {
merchant.removeVisitor(this);
this.setHiredMerchant(null);
>>>>>>>
if (merchant == null) {
return;
}
if (closeMerchant) {
if (merchant.isOwner(this) && merchant.getItems().isEmpty()) {
merchant.forceClose();
} else {
merchant.removeVisitor(this);
this.setHiredMerchant(null);
}
<<<<<<<
public boolean attemptCatchFish(int baitLevel) {
return GameConstants.isFishingArea(mapid) && this.getPosition().getY() > 0 && ItemConstants.isFishingChair(chair.get()) && this.getWorldServer().registerFisherPlayer(this, baitLevel);
}
=======
>>>>>>>
public boolean attemptCatchFish(int baitLevel) {
return GameConstants.isFishingArea(mapid) && this.getPosition().getY() > 0 && ItemConstants.isFishingChair(chair.get()) && this.getWorldServer().registerFisherPlayer(this, baitLevel);
}
<<<<<<<
long curTime = Server.getInstance().getCurrentTime();
if(nextWarningTime < curTime) {
nextWarningTime = curTime + (60 * 1000); // show underlevel info again after 1 minute
showHint("You have gained #rno experience#k from defeating #e#b" + mob.getName() + "#k#n (lv. #b" + mob.getLevel() + "#k)! Take note you must have around the same level as the mob to start earning EXP from it.");
}
}
public void showMapOwnershipInfo(MapleCharacter mapOwner) {
long curTime = Server.getInstance().getCurrentTime();
if(nextWarningTime < curTime) {
nextWarningTime = curTime + (60 * 1000); // show underlevel info again after 1 minute
String medal = "";
Item medalItem = mapOwner.getInventory(MapleInventoryType.EQUIPPED).getItem((short) -49);
if (medalItem != null) {
medal = "<" + ii.getName(medalItem.getItemId()) + "> ";
=======
chrLock.lock();
try {
long curTime = Server.getInstance().getCurrentTime();
if (nextUnderlevelTime < curTime) {
nextUnderlevelTime = curTime + (60 * 1000); // show underlevel info again after 1 minute
showHint("You have gained #rno experience#k from defeating #e#b" + mob.getName() + "#k#n (lv. #b" + mob.getLevel() + "#k)! Take note you must have around the same level as the mob to start earning EXP from it.");
>>>>>>>
long curTime = Server.getInstance().getCurrentTime();
if(nextWarningTime < curTime) {
nextWarningTime = curTime + (60 * 1000); // show underlevel info again after 1 minute
showHint("You have gained #rno experience#k from defeating #e#b" + mob.getName() + "#k#n (lv. #b" + mob.getLevel() + "#k)! Take note you must have around the same level as the mob to start earning EXP from it.");
}
}
public void showMapOwnershipInfo(MapleCharacter mapOwner) {
long curTime = Server.getInstance().getCurrentTime();
if(nextWarningTime < curTime) {
nextWarningTime = curTime + (60 * 1000); // show underlevel info again after 1 minute
String medal = "";
Item medalItem = mapOwner.getInventory(MapleInventoryType.EQUIPPED).getItem((short) -49);
if (medalItem != null) {
medal = "<" + ii.getName(medalItem.getItemId()) + "> "; |
<<<<<<<
if (expSharersMaxLevel < mc.getLevel()) {
expSharersMaxLevel = mc.getLevel();
}
=======
expSharersLevel += mc.getLevel();
>>>>>>>
if (expSharersMaxLevel < mc.getLevel()) {
expSharersMaxLevel = mc.getLevel();
}
<<<<<<<
private void propagateExperienceGains(Map<MapleCharacter, Float> personalExpReward, Map<MapleCharacter, Float> partyExpReward) {
Set<MapleCharacter> expRewardPlayers = new HashSet<>(personalExpReward.keySet());
expRewardPlayers.addAll(partyExpReward.keySet());
for (MapleCharacter chr : expRewardPlayers) {
Float personalExp = personalExpReward.get(chr);
Float partyExp = partyExpReward.get(chr);
this.giveExpToCharacter(chr, personalExp, partyExp);
}
}
=======
>>>>>>>
private void propagateExperienceGains(Map<MapleCharacter, Float> personalExpReward, Map<MapleCharacter, Float> partyExpReward) {
Set<MapleCharacter> expRewardPlayers = new HashSet<>(personalExpReward.keySet());
expRewardPlayers.addAll(partyExpReward.keySet());
for (MapleCharacter chr : expRewardPlayers) {
Float personalExp = personalExpReward.get(chr);
Float partyExp = partyExpReward.get(chr);
this.giveExpToCharacter(chr, personalExp, partyExp);
}
}
<<<<<<<
Map<MapleCharacter, Float> personalExpReward = new HashMap<>();
Map<MapleCharacter, Float> partyExpReward = new HashMap<>();
=======
>>>>>>>
Map<MapleCharacter, Float> personalExpReward = new HashMap<>();
Map<MapleCharacter, Float> partyExpReward = new HashMap<>();
<<<<<<<
if(mc.getLevel() >= minThresholdLevel) {
//NO EXP WILL BE GIVEN for those who are underleveled!
personalExpReward.put(mc, xp);
MapleParty p = mc.getParty();
if (p != null) { // for party bonus exp
int pID = p.getId();
float pXP = xp + (partyExp.containsKey(pID) ? partyExp.get(pID) : 0);
partyExp.put(pID, pXP);
=======
MapleParty p = mc.getParty();
if (p != null) {
int pID = p.getId();
float pXP = xp + (partyExp.containsKey(pID) ? partyExp.get(pID) : 0);
partyExp.put(pID, pXP);
} else {
if (mc.getLevel() >= minThresholdLevel) {
//NO EXP WILL BE GIVEN for those who are underleveled!
giveExpToCharacter(mc, xp, isKiller, 1);
} else {
underleveled.add(mc);
>>>>>>>
if(mc.getLevel() >= minThresholdLevel) {
//NO EXP WILL BE GIVEN for those who are underleveled!
personalExpReward.put(mc, xp);
MapleParty p = mc.getParty();
if (p != null) { // for party bonus exp
int pID = p.getId();
float pXP = xp + (partyExp.containsKey(pID) ? partyExp.get(pID) : 0);
partyExp.put(pID, pXP);
<<<<<<<
int mostDamageCid = this.getHighestDamagerId();
=======
>>>>>>>
int mostDamageCid = this.getHighestDamagerId();
<<<<<<<
statiLock.lock();
try {
MonsterStatusEffect mse = stati.get(MonsterStatus.SHOWDOWN);
if (mse != null) {
multiplier *= (1.0 + (mse.getStati().get(MonsterStatus.SHOWDOWN).doubleValue() / 100.0));
}
} finally {
statiLock.unlock();
}
return multiplier;
}
private static int expValueToInteger(double exp) {
if (exp > Integer.MAX_VALUE) {
exp = Integer.MAX_VALUE;
} else if (exp < Integer.MIN_VALUE) {
exp = Integer.MIN_VALUE;
}
return (int) exp;
}
private void giveExpToCharacter(MapleCharacter attacker, Float personalExp, Float partyExp) {
if (attacker.isAlive()) {
if (personalExp != null) {
personalExp *= getStatusExpMultiplier(attacker);
personalExp *= attacker.getExpRate();
} else {
personalExp = 0.0f;
}
=======
private void giveExpToCharacter(MapleCharacter attacker, float exp, boolean isKiller, int numExpSharers) {
//PARTY BONUS: 2p -> +2% , 3p -> +4% , 4p -> +6% , 5p -> +8% , 6p -> +10%
final float partyModifier = numExpSharers <= 1 ? 0.0f : 0.02f * (numExpSharers - 1);
int partyExp = 0;
if (attacker.getHp() > 0) {
exp *= attacker.getExpRate();
>>>>>>>
statiLock.lock();
try {
MonsterStatusEffect mse = stati.get(MonsterStatus.SHOWDOWN);
if (mse != null) {
multiplier *= (1.0 + (mse.getStati().get(MonsterStatus.SHOWDOWN).doubleValue() / 100.0));
}
} finally {
statiLock.unlock();
}
return multiplier;
}
private static int expValueToInteger(double exp) {
if (exp > Integer.MAX_VALUE) {
exp = Integer.MAX_VALUE;
} else if (exp < Integer.MIN_VALUE) {
exp = Integer.MIN_VALUE;
}
return (int) exp;
}
private void giveExpToCharacter(MapleCharacter attacker, Float personalExp, Float partyExp) {
if (attacker.isAlive()) {
if (personalExp != null) {
personalExp *= getStatusExpMultiplier(attacker);
personalExp *= attacker.getExpRate();
} else {
personalExp = 0.0f;
}
<<<<<<<
=======
int personalExp = (int) exp;
if (exp <= Integer.MAX_VALUE) { // assuming no negative xp here
if (partyModifier > 0.0f) {
partyExp = (int) (personalExp * partyModifier * ServerConstants.PARTY_BONUS_EXP_RATE);
}
Integer holySymbol = attacker.getBuffedValue(MapleBuffStat.HOLY_SYMBOL);
if (holySymbol != null) {
personalExp *= 1.0 + (holySymbol.doubleValue() / 100.0);
}
>>>>>>>
<<<<<<<
int _partyExp = expValueToInteger(partyExp);
attacker.gainExp(_personalExp, _partyExp, true, false, false);
attacker.increaseEquipExp(_personalExp);
=======
attacker.gainExp(personalExp, partyExp, true, false, isKiller);
attacker.increaseEquipExp(personalExp);
>>>>>>>
int _partyExp = expValueToInteger(partyExp);
attacker.gainExp(_personalExp, _partyExp, true, false, false);
attacker.increaseEquipExp(_personalExp); |
<<<<<<<
private Components components;
=======
private final Set<String> operationIDs = new HashSet<>();
>>>>>>>
private Components components;
private final Set<String> operationIDs = new HashSet<>(); |
<<<<<<<
public PathItem processRefToExternalPathItem(String $ref, RefFormat refFormat) {
final PathItem pathItem = cache.loadRef($ref, refFormat, PathItem.class);
String newRef;
Map<String, PathItem> paths = openAPI.getPaths();
if (paths == null) {
paths = new LinkedHashMap<>();
}
final String possiblyConflictingDefinitionName = computeDefinitionName($ref);
PathItem existingPathItem = paths.get(possiblyConflictingDefinitionName);
if (existingPathItem != null) {
LOGGER.debug("A model for " + existingPathItem + " already exists");
if(existingPathItem.get$ref() != null) {
// use the new model
existingPathItem = null;
}
}
newRef = possiblyConflictingDefinitionName;
cache.putRenamedRef($ref, newRef);
if(pathItem != null) {
if(pathItem.readOperationsMap() != null) {
final Map<PathItem.HttpMethod, Operation> operationMap = pathItem.readOperationsMap();
for (PathItem.HttpMethod httpMethod : operationMap.keySet()) {
Operation operation = operationMap.get(httpMethod);
if (operation.getResponses() != null) {
final Map<String, ApiResponse> responses = operation.getResponses();
if (responses != null) {
for (String responseCode : responses.keySet()) {
ApiResponse response = responses.get(responseCode);
if (response != null) {
Schema schema = null;
if (response.getContent() != null) {
Map<String, MediaType> content = response.getContent();
for (String mediaName : content.keySet()) {
MediaType mediaType = content.get(mediaName);
if (mediaType.getSchema() != null) {
schema = mediaType.getSchema();
if (schema != null) {
processRefSchemaObject(mediaType.getSchema(), $ref);
}
}
}
}
}
}
}
}
if (operation.getRequestBody() != null) {
RequestBody body = operation.getRequestBody();
Schema schema = null;
if (body.getContent() != null) {
Map<String, MediaType> content = body.getContent();
for (String mediaName : content.keySet()) {
MediaType mediaType = content.get(mediaName);
if (mediaType.getSchema() != null) {
schema = mediaType.getSchema();
if (schema != null) {
processRefSchemaObject(mediaType.getSchema(), $ref);
}
}
}
}
}
}
}
}
return pathItem;
}
=======
private void processDiscriminator(Discriminator d, String file) {
if (d != null && d.getMapping() != null) {
processDiscriminatorMapping(d.getMapping(), file);
}
}
private void processDiscriminatorMapping(Map<String, String> mapping, String file) {
for (String key : mapping.keySet()) {
String ref = mapping.get(key);
Schema subtype = new Schema().$ref(ref);
processSchema(subtype, file);
mapping.put(key, subtype.get$ref());
}
}
>>>>>>>
public PathItem processRefToExternalPathItem(String $ref, RefFormat refFormat) {
final PathItem pathItem = cache.loadRef($ref, refFormat, PathItem.class);
String newRef;
Map<String, PathItem> paths = openAPI.getPaths();
if (paths == null) {
paths = new LinkedHashMap<>();
}
final String possiblyConflictingDefinitionName = computeDefinitionName($ref);
PathItem existingPathItem = paths.get(possiblyConflictingDefinitionName);
if (existingPathItem != null) {
LOGGER.debug("A model for " + existingPathItem + " already exists");
if(existingPathItem.get$ref() != null) {
// use the new model
existingPathItem = null;
}
}
newRef = possiblyConflictingDefinitionName;
cache.putRenamedRef($ref, newRef);
if(pathItem != null) {
if(pathItem.readOperationsMap() != null) {
final Map<PathItem.HttpMethod, Operation> operationMap = pathItem.readOperationsMap();
for (PathItem.HttpMethod httpMethod : operationMap.keySet()) {
Operation operation = operationMap.get(httpMethod);
if (operation.getResponses() != null) {
final Map<String, ApiResponse> responses = operation.getResponses();
if (responses != null) {
for (String responseCode : responses.keySet()) {
ApiResponse response = responses.get(responseCode);
if (response != null) {
Schema schema = null;
if (response.getContent() != null) {
Map<String, MediaType> content = response.getContent();
for (String mediaName : content.keySet()) {
MediaType mediaType = content.get(mediaName);
if (mediaType.getSchema() != null) {
schema = mediaType.getSchema();
if (schema != null) {
processRefSchemaObject(mediaType.getSchema(), $ref);
}
}
}
}
}
}
}
}
if (operation.getRequestBody() != null) {
RequestBody body = operation.getRequestBody();
Schema schema = null;
if (body.getContent() != null) {
Map<String, MediaType> content = body.getContent();
for (String mediaName : content.keySet()) {
MediaType mediaType = content.get(mediaName);
if (mediaType.getSchema() != null) {
schema = mediaType.getSchema();
if (schema != null) {
processRefSchemaObject(mediaType.getSchema(), $ref);
}
}
}
}
}
}
}
}
return pathItem;
}
private void processDiscriminator(Discriminator d, String file) {
if (d != null && d.getMapping() != null) {
processDiscriminatorMapping(d.getMapping(), file);
}
}
private void processDiscriminatorMapping(Map<String, String> mapping, String file) {
for (String key : mapping.keySet()) {
String ref = mapping.get(key);
Schema subtype = new Schema().$ref(ref);
processSchema(subtype, file);
mapping.put(key, subtype.get$ref());
}
} |
<<<<<<<
@Test
public void referringSpecWithoutComponentsTag() throws Exception {
ParseOptions resolve = new ParseOptions();
resolve.setResolveFully(true);
final OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/ref-without-component/a.yaml", null, resolve);
Map<String, Schema> schemas = openAPI.getComponents().getSchemas();
Assert.assertEquals("Example value", schemas.get("CustomerType").getExample());
}
=======
@Test
public void testRefNameConflicts() throws Exception {
ParseOptions options = new ParseOptions();
options.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().readLocation("src/test/resources/refs-name-conflict/a.yaml",null, options).getOpenAPI();
AssertJUnit.assertEquals("local", ((Schema) openAPI.getPaths().get("/newPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema)openAPI.getPaths().get("/oldPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema)openAPI.getPaths().get("/yetAnotherPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("local", ((Schema) openAPI.getComponents().getSchemas().get("PersonObj").getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema) openAPI.getComponents().getSchemas().get("PersonObj_2").getProperties().get("location")).getExample());
}
>>>>>>>
@Test
public void referringSpecWithoutComponentsTag() throws Exception {
ParseOptions resolve = new ParseOptions();
resolve.setResolveFully(true);
final OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/ref-without-component/a.yaml", null, resolve);
Map<String, Schema> schemas = openAPI.getComponents().getSchemas();
Assert.assertEquals("Example value", schemas.get("CustomerType").getExample());
}
@Test
public void testRefNameConflicts() throws Exception {
ParseOptions options = new ParseOptions();
options.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().readLocation("src/test/resources/refs-name-conflict/a.yaml",null, options).getOpenAPI();
AssertJUnit.assertEquals("local", ((Schema) openAPI.getPaths().get("/newPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema)openAPI.getPaths().get("/oldPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema)openAPI.getPaths().get("/yetAnotherPerson").getPost().getResponses().get("200").getContent().get("*/*").getSchema().getProperties().get("location")).getExample());
AssertJUnit.assertEquals("local", ((Schema) openAPI.getComponents().getSchemas().get("PersonObj").getProperties().get("location")).getExample());
AssertJUnit.assertEquals("referred", ((Schema) openAPI.getComponents().getSchemas().get("PersonObj_2").getProperties().get("location")).getExample());
} |
<<<<<<<
import io.swagger.v3.oas.models.headers.Header;
=======
import io.swagger.v3.oas.models.links.Link;
>>>>>>>
import io.swagger.v3.oas.models.headers.Header;
import io.swagger.v3.oas.models.links.Link;
<<<<<<<
@Test
public void testIssue915() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_918.yaml", null, parseOptions);
Map<String, Header> headers = openAPI.getPaths().get("/2.0/users/").getGet().getResponses().get("200").getHeaders();
String description = headers.get("X-Rate-Limit").getDescription();
assertEquals(description, "The number of allowed requests in the current period");
}
=======
@Test
public void shouldParseParameters() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_877.yaml", null, parseOptions);
Parameter parameter = openAPI.getPaths().get("/adopt").getGet().getParameters().get(0);
assertNotNull(parameter);
assertEquals(parameter.getIn(), "path");
assertEquals(parameter.getName(), "playerId");
}
@Test
public void testIssue884() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_884.yaml", null, parseOptions);
Map<String, Link> links = openAPI.getPaths().get("/2.0/repositories/{username}").getGet().getResponses().get("200").getLinks();
String operationId = links.get("userRepository").getOperationId();
assertEquals(operationId, "getRepository");
}
@Test
public void testLinkIssue() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/linkIssue.yaml", null, parseOptions);
Map<String, Link> links = openAPI.getPaths().get("/2.0/repositories/{username}").getGet().getResponses().get("200").getLinks();
Object requestBody = links.get("userRepository").getRequestBody();
assertEquals(requestBody, "$response.body#/slug");
}
>>>>>>>
@Test
public void testIssue915() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_918.yaml", null, parseOptions);
Map<String, Header> headers = openAPI.getPaths().get("/2.0/users/").getGet().getResponses().get("200").getHeaders();
String description = headers.get("X-Rate-Limit").getDescription();
assertEquals(description, "The number of allowed requests in the current period");
}
    @Test
    public void shouldParseParameters() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_877.yaml", null, parseOptions);
Parameter parameter = openAPI.getPaths().get("/adopt").getGet().getParameters().get(0);
assertNotNull(parameter);
assertEquals(parameter.getIn(), "path");
assertEquals(parameter.getName(), "playerId");
}
@Test
public void testIssue884() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/issue_884.yaml", null, parseOptions);
Map<String, Link> links = openAPI.getPaths().get("/2.0/repositories/{username}").getGet().getResponses().get("200").getLinks();
String operationId = links.get("userRepository").getOperationId();
assertEquals(operationId, "getRepository");
}
@Test
public void testLinkIssue() {
ParseOptions parseOptions = new ParseOptions();
parseOptions.setResolveFully(true);
OpenAPI openAPI = new OpenAPIV3Parser().read("src/test/resources/linkIssue.yaml", null, parseOptions);
Map<String, Link> links = openAPI.getPaths().get("/2.0/repositories/{username}").getGet().getResponses().get("200").getLinks();
Object requestBody = links.get("userRepository").getRequestBody();
assertEquals(requestBody, "$response.body#/slug");
} |
<<<<<<<
@Test
public void testIssue1236() {
final ParseOptions options = new ParseOptions();
options.setResolve(true);
SwaggerParseResult result = new OpenAPIV3Parser()
.readLocation("src/test/resources/issue-1236/petstore.json",null,options);
assertEquals(result.getMessages().get(0),"attribute .servers. invalid url : /te st/sample.yaml");
}
=======
@Test
public void testSampleParser() {
final String location = "src/test/resources/issue-1211.json";
final ParseOptions options = new ParseOptions();
options.setResolve(true);
final OpenAPIV3Parser parser = new OpenAPIV3Parser();
final SwaggerParseResult result = parser.readLocation(location, null, options);
System.out.println(result.getMessages());
OpenAPI openAPI = result.getOpenAPI();
assertNotNull(result.getOpenAPI());
assertTrue(result.getMessages().size() > 0);
assertEquals(result.getMessages().get(0).contains("attribute components.schemas.Pet. writeOnly and readOnly are both present"), true);
}
@Test
public void testDuplicateHttpStatusCodes() {
final String location = "src/test/resources/duplicateHttpStatusCodes.json";
final ParseOptions options = new ParseOptions();
options.setResolve(true);
final OpenAPIV3Parser parser = new OpenAPIV3Parser();
final SwaggerParseResult result = parser.readLocation(location, null, options);
assertNull(result.getOpenAPI());
List<String> messages = result.getMessages();
assertEquals(1, messages.size());
assertEquals(messages.get(0), "Duplicate field '200' in `src/test/resources/duplicateHttpStatusCodes.json`");
}
>>>>>>>
@Test
public void testIssue1236() {
final ParseOptions options = new ParseOptions();
options.setResolve(true);
SwaggerParseResult result = new OpenAPIV3Parser()
.readLocation("src/test/resources/issue-1236/petstore.json",null,options);
assertEquals(result.getMessages().get(0),"attribute .servers. invalid url : /te st/sample.yaml");
    }
    @Test
    public void testSampleParser() {
final String location = "src/test/resources/issue-1211.json";
final ParseOptions options = new ParseOptions();
options.setResolve(true);
final OpenAPIV3Parser parser = new OpenAPIV3Parser();
final SwaggerParseResult result = parser.readLocation(location, null, options);
System.out.println(result.getMessages());
OpenAPI openAPI = result.getOpenAPI();
assertNotNull(result.getOpenAPI());
assertTrue(result.getMessages().size() > 0);
assertEquals(result.getMessages().get(0).contains("attribute components.schemas.Pet. writeOnly and readOnly are both present"), true);
}
@Test
public void testDuplicateHttpStatusCodes() {
final String location = "src/test/resources/duplicateHttpStatusCodes.json";
final ParseOptions options = new ParseOptions();
options.setResolve(true);
final OpenAPIV3Parser parser = new OpenAPIV3Parser();
final SwaggerParseResult result = parser.readLocation(location, null, options);
assertNull(result.getOpenAPI());
List<String> messages = result.getMessages();
assertEquals(1, messages.size());
assertEquals(messages.get(0), "Duplicate field '200' in `src/test/resources/duplicateHttpStatusCodes.json`");
} |
<<<<<<<
@Test
public void testRemoteParameterIssue1103(@Injectable final List<AuthorizationValue> auths) throws Exception{
OpenAPI result = new OpenAPIV3Parser().read("issue-1103/remote-parameter-swagger.yaml");
Assert.assertNotNull(result);
Assert.assertEquals(result.getPaths().get("/Translation/{lang}").getPut().getParameters().get(0).getName(), "lang");
}
@Test
public void test30NoOptions(@Injectable final List<AuthorizationValue> auths) throws Exception{
=======
>>>>>>>
@Test
public void testRemoteParameterIssue1103(@Injectable final List<AuthorizationValue> auths) throws Exception{
OpenAPI result = new OpenAPIV3Parser().read("issue-1103/remote-parameter-swagger.yaml");
Assert.assertNotNull(result);
Assert.assertEquals(result.getPaths().get("/Translation/{lang}").getPut().getParameters().get(0).getName(), "lang");
}
@Test
public void test30NoOptions(@Injectable final List<AuthorizationValue> auths) throws Exception{ |
<<<<<<<
private List<Location> warnings = new ArrayList<>();
=======
private List<Location> unique = new ArrayList<>();
>>>>>>>
private List<Location> warnings = new ArrayList<>();
private List<Location> unique = new ArrayList<>(); |
<<<<<<<
import io.swagger.v3.oas.models.media.Schema;
=======
import io.swagger.v3.oas.models.PathItem;
import io.swagger.v3.oas.models.media.Schema;
import io.swagger.v3.oas.models.parameters.Parameter;
>>>>>>>
import io.swagger.v3.oas.models.media.Schema;
import io.swagger.v3.oas.models.PathItem;
import io.swagger.v3.oas.models.parameters.Parameter; |
<<<<<<<
ScriptEngine nashorn = new NashornScriptEngineFactory().getScriptEngine();
=======
//ScriptEngineManager manager = new ScriptEngineManager();
//ScriptEngine nashorn = manager.getEngineByName("nashorn");
ScriptEngine nashorn = new ScriptEngineManager(null).getEngineByName("nashorn");
//ScriptEngine nashorn = new NashornScriptEngineFactory().getScriptEngine();
>>>>>>>
ScriptEngine nashorn = new ScriptEngineManager(null).getEngineByName("nashorn"); |
<<<<<<<
try (AbstractPdb pdb = ghidra.app.util.bin.format.pdb2.pdbreader.PdbParser
.parse(pdbFile.getAbsolutePath(), pdbReaderOptions, monitor)) {
=======
PdbProgramAttributes programAttributes = new PdbProgramAttributes(program);
try (AbstractPdb pdb =
ghidra.app.util.bin.format.pdb2.pdbreader.PdbParser.parse(pdbFile.getAbsolutePath(),
pdbReaderOptions, monitor)) {
PdbIdentifiers identifiers = pdb.getIdentifiers();
if (!PdbLocator.verifyPdbSignature(programAttributes, identifiers)) {
StringBuilder builder = new StringBuilder();
builder.append("Selected PDB does not match program's PDB specification!\n");
builder.append(BLANK_LINE);
builder.append("Program's PDB specification:\n");
builder.append(PdbLocator.formatPdbIdentifiers(programAttributes));
builder.append(BLANK_LINE);
builder.append("Selected PDB file specification:\n");
builder.append(
PdbLocator.formatPdbIdentifiers(pdbFile.getAbsolutePath(), identifiers));
builder.append(BLANK_LINE);
builder.append("Do you wish to force load this PDB?");
if (OptionDialog.YES_OPTION != OptionDialog.showYesNoDialog(null,
"Confirm PDB Load", builder.toString())) {
return false;
}
}
>>>>>>>
PdbProgramAttributes programAttributes = new PdbProgramAttributes(program);
try (AbstractPdb pdb = ghidra.app.util.bin.format.pdb2.pdbreader.PdbParser
.parse(pdbFile.getAbsolutePath(), pdbReaderOptions, monitor)) {
PdbIdentifiers identifiers = pdb.getIdentifiers();
if (!PdbLocator.verifyPdbSignature(programAttributes, identifiers)) {
StringBuilder builder = new StringBuilder();
builder.append("Selected PDB does not match program's PDB specification!\n");
builder.append(BLANK_LINE);
builder.append("Program's PDB specification:\n");
builder.append(PdbLocator.formatPdbIdentifiers(programAttributes));
builder.append(BLANK_LINE);
builder.append("Selected PDB file specification:\n");
builder.append(
PdbLocator.formatPdbIdentifiers(pdbFile.getAbsolutePath(), identifiers));
builder.append(BLANK_LINE);
builder.append("Do you wish to force load this PDB?");
if (OptionDialog.YES_OPTION != OptionDialog.showYesNoDialog(null,
"Confirm PDB Load", builder.toString())) {
return false;
}
} |
<<<<<<<
/*
* @author Dave Rusek <drusuk at basho dot com>
* @since 2.0
*/
public class RiakSet extends RiakDatatype<Set<byte[]>>
=======
public class RiakSet extends RiakDatatype<Set<BinaryValue>>
>>>>>>>
/**
* @author Dave Rusek <drusuk at basho dot com>
* @since 2.0
*/
public class RiakSet extends RiakDatatype<Set<BinaryValue>> |
<<<<<<<
/**
* Creates a new "Set" cell from the provided raw encoded value.
* Use the @{link SetCell} class to help in encoding/decoding these values.
* @param value The raw binary data for the set value.
* @return The new set Cell.
*/
public static Cell newSet(byte[][] value)
{
Cell cell = new Cell();
cell.setValue = value;
return cell;
}
/**
* Creates a new "Map" cell from the provided raw encoded value.
* Use the @{link MapCell} class to help in encoding/decoding these values.
* @param value The raw binary data for the map value.
* @return The new map Cell.
*/
public static Cell newMap(byte[] value)
{
Cell cell = new Cell();
cell.mapValue = value;
return cell;
}
@Override
public String toString()
{
final StringBuilder sb = new StringBuilder("Cell{ ");
if(this.hasBinaryValue())
{
final String value = this.getUtf8String();
if(value.length() > 32)
{
sb.append(value.substring(0,32));
sb.append("...");
}
else
{
sb.append(value);
}
}
else if(this.hasLong())
{
sb.append(this.getLong());
}
else if(this.hasFloat())
{
sb.append(this.getFloat());
}
else if(this.hasDouble())
{
sb.append(this.getDouble());
}
else if(this.hasNumeric())
{
sb.append(this.getRawNumericString());
}
else if(this.hasTimestamp())
{
sb.append(this.getTimestamp());
}
else if(this.hasBoolean())
{
sb.append(this.getBoolean());
}
sb.append(" }");
return sb.toString();
}
=======
>>>>>>>
@Override
public String toString()
{
final StringBuilder sb = new StringBuilder("Cell{ ");
if(this.hasBinaryValue())
{
final String value = this.getUtf8String();
if(value.length() > 32)
{
sb.append(value.substring(0,32));
sb.append("...");
}
else
{
sb.append(value);
}
}
else if(this.hasLong())
{
sb.append(this.getLong());
}
else if(this.hasDouble())
{
sb.append(this.getDouble());
}
else if(this.hasTimestamp())
{
sb.append(this.getTimestamp());
}
else if(this.hasBoolean())
{
sb.append(this.getBoolean());
}
sb.append(" }");
return sb.toString();
} |
<<<<<<<
=======
import ghidra.framework.options.Options;
import ghidra.program.model.address.AddressSpace;
>>>>>>>
import ghidra.framework.options.Options;
<<<<<<<
import ghidra.util.Msg;
=======
import ghidra.util.*;
>>>>>>>
import ghidra.util.Msg;
import ghidra.util.NumericUtilities; |
<<<<<<<
import com.basho.riak.client.operations.datatypes.Context;
import com.basho.riak.client.operations.datatypes.RiakDatatype;
=======
>>>>>>>
import com.basho.riak.client.operations.datatypes.Context; |
<<<<<<<
import org.mybatis.dynamic.sql.SqlTable;
import org.mybatis.dynamic.sql.render.RenderingUtilities;
=======
>>>>>>>
<<<<<<<
+ tableNameIncludingAlias(tableAlias)
+ joinClause().map(w -> ONE_SPACE + w).orElse(EMPTY_STRING)
=======
+ tableName()
>>>>>>>
+ tableName()
+ joinClause().map(w -> ONE_SPACE + w).orElse(EMPTY_STRING)
<<<<<<<
private String tableNameIncludingAlias(Optional<String> tableAlias) {
return RenderingUtilities.tableNameIncludingAlias(table(), tableAlias);
}
=======
>>>>>>>
<<<<<<<
private SqlTable table;
private Optional<String> joinClause;
=======
>>>>>>>
private Optional<String> joinClause;
<<<<<<<
public Builder withTableAlias(Optional<String> tableAlias) {
this.tableAlias = tableAlias;
return this;
}
public Builder withJoinClause(Optional<String> joinClause) {
this.joinClause = joinClause;
return this;
}
=======
>>>>>>>
public Builder withJoinClause(Optional<String> joinClause) {
this.joinClause = joinClause;
return this;
} |
<<<<<<<
public void testGeneralInsert() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
AnimalDataMapper mapper = sqlSession.getMapper(AnimalDataMapper.class);
GeneralInsertStatementProvider insertStatement = insertInto(animalData)
.set(id).equalTo(101)
.set(animalName).equalToStringConstant("Fred")
.set(brainWeight).equalToConstant("2.2")
.set(bodyWeight).equalTo(4.5)
.build()
.render(RenderingStrategies.MYBATIS3);
String expected = "insert into AnimalData (id, animal_name, brain_weight, body_weight) "
+ "values (#{parameters.p1,jdbcType=INTEGER}, 'Fred', 2.2, #{parameters.p2,jdbcType=DOUBLE})";
assertThat(insertStatement.getInsertStatement()).isEqualTo(expected);
assertThat(insertStatement.getParameters().size()).isEqualTo(2);
assertThat(insertStatement.getParameters().get("p1")).isEqualTo(101);
assertThat(insertStatement.getParameters().get("p2")).isEqualTo(4.5);
int rows = mapper.generalInsert(insertStatement);
assertThat(rows).isEqualTo(1);
}
}
@Test
public void testUpdateWithSelect() {
=======
void testUpdateWithSelect() {
>>>>>>>
void testGeneralInsert() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
AnimalDataMapper mapper = sqlSession.getMapper(AnimalDataMapper.class);
GeneralInsertStatementProvider insertStatement = insertInto(animalData)
.set(id).equalTo(101)
.set(animalName).equalToStringConstant("Fred")
.set(brainWeight).equalToConstant("2.2")
.set(bodyWeight).equalTo(4.5)
.build()
.render(RenderingStrategies.MYBATIS3);
String expected = "insert into AnimalData (id, animal_name, brain_weight, body_weight) "
+ "values (#{parameters.p1,jdbcType=INTEGER}, 'Fred', 2.2, #{parameters.p2,jdbcType=DOUBLE})";
assertThat(insertStatement.getInsertStatement()).isEqualTo(expected);
assertThat(insertStatement.getParameters().size()).isEqualTo(2);
assertThat(insertStatement.getParameters().get("p1")).isEqualTo(101);
assertThat(insertStatement.getParameters().get("p2")).isEqualTo(4.5);
int rows = mapper.generalInsert(insertStatement);
assertThat(rows).isEqualTo(1);
}
}
@Test
void testUpdateWithSelect() { |
<<<<<<<
public void testGeneralInsert() {
try (SqlSession session = sqlSessionFactory.openSession()) {
PersonMapper mapper = session.getMapper(PersonMapper.class);
int rows = mapper.insert(c ->
c.set(id).equalTo(100)
.set(firstName).equalTo("Joe")
.set(lastName).equalTo(LastName.of("Jones"))
.set(birthDate).equalTo(new Date())
.set(employed).equalTo(true)
.set(occupation).equalTo("Developer")
.set(addressId).equalTo(1)
);
assertThat(rows).isEqualTo(1);
}
}
@Test
public void testInsertMultiple() {
=======
void testInsertMultiple() {
>>>>>>>
void testGeneralInsert() {
try (SqlSession session = sqlSessionFactory.openSession()) {
PersonMapper mapper = session.getMapper(PersonMapper.class);
int rows = mapper.insert(c ->
c.set(id).equalTo(100)
.set(firstName).equalTo("Joe")
.set(lastName).equalTo(LastName.of("Jones"))
.set(birthDate).equalTo(new Date())
.set(employed).equalTo(true)
.set(occupation).equalTo("Developer")
.set(addressId).equalTo(1)
);
assertThat(rows).isEqualTo(1);
}
}
@Test
void testInsertMultiple() { |
<<<<<<<
FontRenderContext frc = g2.getFontRenderContext();
//fontAscent = (int) new TextLayout("H", font, frc).getAscent();
fontAscent = (int) new TextLayout("H", font, frc).getBounds().getHeight();
=======
>>>>>>>
FontRenderContext frc = g2.getFontRenderContext();
//fontAscent = (int) new TextLayout("H", font, frc).getAscent();
fontAscent = (int) new TextLayout("H", font, frc).getBounds().getHeight(); |
<<<<<<<
return CommentImpl.deserialize(data, start, len, pooled);
=======
return CommentImpl.deserialize(data, start, len, doc, pooled);
case Node.CDATA_SECTION_NODE :
return CDATASectionImpl.deserialize(data, start, len, doc, pooled);
>>>>>>>
return CommentImpl.deserialize(data, start, len, pooled);
case Node.CDATA_SECTION_NODE :
return CDATASectionImpl.deserialize(data, start, len, doc, pooled); |
<<<<<<<
final String prompt = Language.text("open");
if (Preferences.getBoolean("chooser.files.native")) { // don't use native dialogs on Linux //$NON-NLS-1$
// get the front-most window frame for placing file dialog
FileDialog fd = new FileDialog(activeEditor, prompt, FileDialog.LOAD);
=======
final String prompt = "Open a Processing sketch...";
// don't use native dialogs on Linux (or anyone else w/ override)
if (Preferences.getBoolean("chooser.files.native")) { //$NON-NLS-1$
// use the front-most window frame for placing file dialog
FileDialog openDialog =
new FileDialog(activeEditor, prompt, FileDialog.LOAD);
>>>>>>>
final String prompt = Language.text("open");
// don't use native dialogs on Linux (or anyone else w/ override)
if (Preferences.getBoolean("chooser.files.native")) { //$NON-NLS-1$
// use the front-most window frame for placing file dialog
FileDialog openDialog =
new FileDialog(activeEditor, prompt, FileDialog.LOAD); |
<<<<<<<
"Returns the content of a POST request as string",
=======
"Returns the content of a POST request as an XML document or a string representaion. Returns an empty sequence if there is no data.",
>>>>>>>
"Returns the content of a POST request as an XML document or a string representaion. Returns an empty sequence if there is no data.",
<<<<<<<
if (value.getObject() instanceof RequestWrapper) {
RequestWrapper request = (RequestWrapper)value.getObject();
try {
=======
if(value.getObject() instanceof RequestWrapper)
{
RequestWrapper request = (RequestWrapper)value.getObject();
//if the content length is unknown, return
if(request.getContentLength() == -1)
{
return Sequence.EMPTY_SEQUENCE;
}
//first, get the content of the request
byte[] bufRequestData = null;
try
{
>>>>>>>
if(value.getObject() instanceof RequestWrapper)
{
RequestWrapper request = (RequestWrapper)value.getObject();
//if the content length is unknown, return
if(request.getContentLength() == -1)
{
return Sequence.EMPTY_SEQUENCE;
}
//first, get the content of the request
try
{ |
<<<<<<<
frame.setLocationRelativeTo(null);
=======
// if (frame != null) {
// frame.setLocationRelativeTo(null);
// }
>>>>>>>
frame.setLocationRelativeTo(null);
// if (frame != null) {
// frame.setLocationRelativeTo(null);
// } |
<<<<<<<
// label = new JLabel(" ("+Language.text("preferences.requires_restart")+")");
// label = new JLabel(" (requires restart of Processing)");
// box.add(label);
=======
>>>>>>>
<<<<<<<
// Launch programs as [ ] 32-bit [ ] 64-bit (Mac OS X only)
/*
if (Base.isMacOS()) {
box = Box.createHorizontalBox();
label = new JLabel(Language.text("preferences.launch_programs_in")+" ");
box.add(label);
bitsThirtyTwoButton = new JRadioButton("32-bit "+Language.text("preferences.launch_programs_in.mode")+" ");
box.add(bitsThirtyTwoButton);
bitsSixtyFourButton = new JRadioButton("64-bit "+Language.text("preferences.launch_programs_in.mode"));
box.add(bitsSixtyFourButton);
ButtonGroup bg = new ButtonGroup();
bg.add(bitsThirtyTwoButton);
bg.add(bitsSixtyFourButton);
pain.add(box);
d = box.getPreferredSize();
box.setBounds(left, top, d.width, d.height);
top += d.height + GUI_BETWEEN;
}
*/
=======
>>>>>>> |
<<<<<<<
childList = null;
=======
maxDepth = other.maxDepth;
childAddress = null;
>>>>>>>
childAddress = null;
<<<<<<<
if(childList != null)
System.arraycopy(childList, 0, newChildList, 0, childList.length);
childList = newChildList;
}
=======
if(childAddress != null)
System.arraycopy(childAddress, 0, newChildList, 0, childAddress.length);
childAddress = newChildList;
}
>>>>>>>
if(childAddress != null)
System.arraycopy(childAddress, 0, newChildList, 0, childAddress.length);
childAddress = newChildList;
}
<<<<<<<
=======
/*
if (!(refChild instanceof StoredNode))
throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, "wrong node type");
StoredNode ref = (StoredNode) refChild;
long next, last = -1;
int idx = -1;
for(int i = children - 1; i >= 0; i--) {
next = childAddress[i];
if (StorageAddress.equals(ref.internalAddress, next)) {
idx = i - 1;
break;
}
}
if (idx < 0)
throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR, "reference node not found");
last = childAddress[idx];
StoredNode prev = (StoredNode) broker.objectWith(
new NodeProxy(this, NodeProxy.UNKNOWN_NODE_GID, last));
for (int i = 0; i < nodes.getLength(); i++) {
prev = (StoredNode) appendChild(null, prev, nodes.item(i));
++children;
resizeChildList();
childAddress[++idx] = prev.internalAddress;
}
broker.storeDocument(null, this);
*/
>>>>>>>
<<<<<<<
=======
/*
if (!(refChild instanceof StoredNode))
throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, "wrong node type");
StoredNode ref = (StoredNode) refChild;
long next, last = -1;
int idx = -1;
for(int i = 0; i < children; i++) {
next = childAddress[i];
if (StorageAddress.equals(ref.internalAddress, next)) {
last = next;
idx = i + 1;
break;
}
}
if (last < 0)
throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR, "reference node not found");
StoredNode prev = getLastNode( (StoredNode) broker.objectWith(
new NodeProxy(this, NodeProxy.UNKNOWN_NODE_GID, last)) );
for (int i = 0; i < nodes.getLength(); i++) {
prev = (StoredNode) appendChild(null, prev, nodes.item(i));
++children;
resizeChildList();
childAddress[idx] = prev.internalAddress;
}
broker.storeDocument(null, this);
*/
>>>>>>>
<<<<<<<
new NodeProxy(this, NodeId.DOCUMENT_NODE, address)
=======
new NodeProxy(this, NodeProxy.DOCUMENT_ELEMENT_GID, childAddress[0])
>>>>>>>
new NodeProxy(this, NodeId.DOCUMENT_NODE, childAddress[0])
<<<<<<<
new NodeProxy(this, NodeId.DOCUMENT_NODE, childList[i])
=======
new NodeProxy(this, NodeProxy.DOCUMENT_ELEMENT_GID, childAddress[i])
>>>>>>>
new NodeProxy(this, NodeId.DOCUMENT_NODE, childAddress[i]) |
<<<<<<<
this.serviceName = StringUtils.isEmpty(serviceName) ? "nova" : serviceName;
// this.initToken();
=======
this.serviceName = serviceName;
this.initToken();
>>>>>>>
this.serviceName = serviceName;
// this.initToken(); |
<<<<<<<
private final static String SIMPLE_CLOUD_PATH = "src/test/resources/enums/my-cloud.groovy";
private final static String INSTALLER_CLOUD_PATH = "src/test/resources/clouds/installer/some-cloud.groovy";
=======
private static final String SIMPLE_CLOUD_PATH = "src/test/resources/enums/my-cloud.groovy";
private static final String SIMPLE_BAD_CLOUD_PATH = "src/test/resources/enums/my-bad-cloud.groovy";
private final static String INSTALLER_CLOUD_PATH = "src/test/resources/clouds/installer/some-cloud.groovy";
>>>>>>>
private static final String SIMPLE_CLOUD_PATH = "src/test/resources/enums/my-cloud.groovy";
private static final String SIMPLE_BAD_CLOUD_PATH = "src/test/resources/enums/my-bad-cloud.groovy";
private static final String INSTALLER_CLOUD_PATH = "src/test/resources/clouds/installer/some-cloud.groovy"; |
<<<<<<<
new FunctionDef(FunctionTrace.signatures[4], FunctionTrace.class)
=======
new FunctionDef(FunctionTrace.signatures[4], FunctionTrace.class),
new FunctionDef(GetLibFunction.signatures[0], GetLibFunction.class),
new FunctionDef(GetLibInfoFunction.signatures[0], GetLibInfoFunction.class),
new FunctionDef(GetUptime.signature, GetUptime.class),
new FunctionDef(FunctionAvailable.signature, FunctionAvailable.class)
>>>>>>>
new FunctionDef(FunctionTrace.signatures[4], FunctionTrace.class),
new FunctionDef(FunctionAvailable.signature, FunctionAvailable.class) |
<<<<<<<
import static java.lang.Boolean.getBoolean;
import static java.lang.Integer.getInteger;
import static uk.co.real_logic.aeron.driver.Configuration.CONDUCTOR_BUFFER_LENGTH;
import static uk.co.real_logic.aeron.driver.Configuration.COUNTER_BUFFERS_LENGTH;
import static uk.co.real_logic.aeron.driver.Configuration.MTU_LENGTH_DEFAULT;
import static uk.co.real_logic.aeron.driver.Configuration.MTU_LENGTH_PROP_NAME;
import static uk.co.real_logic.aeron.driver.Configuration.TO_CLIENTS_BUFFER_LENGTH;
import static uk.co.real_logic.agrona.IoUtil.deleteIfExists;
import static uk.co.real_logic.agrona.IoUtil.mapNewFile;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import uk.co.real_logic.aeron.common.CncFileDescriptor;
import uk.co.real_logic.aeron.common.CommonContext;
import uk.co.real_logic.aeron.common.concurrent.SigIntBarrier;
=======
import uk.co.real_logic.aeron.common.*;
>>>>>>>
import static java.lang.Boolean.getBoolean;
import static java.lang.Integer.getInteger;
import static uk.co.real_logic.aeron.driver.Configuration.CONDUCTOR_BUFFER_LENGTH;
import static uk.co.real_logic.aeron.driver.Configuration.COUNTER_BUFFERS_LENGTH;
import static uk.co.real_logic.aeron.driver.Configuration.MTU_LENGTH_DEFAULT;
import static uk.co.real_logic.aeron.driver.Configuration.MTU_LENGTH_PROP_NAME;
import static uk.co.real_logic.aeron.driver.Configuration.TO_CLIENTS_BUFFER_LENGTH;
import static uk.co.real_logic.agrona.IoUtil.deleteIfExists;
import static uk.co.real_logic.agrona.IoUtil.mapNewFile;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Supplier;
import uk.co.real_logic.aeron.common.CncFileDescriptor;
import uk.co.real_logic.aeron.common.CommonContext; |
<<<<<<<
=======
/**
* Loaded modules, including ones bubbled up from imported modules.
*/
protected HashMap allModules = new HashMap();
/**
* Whether some modules were rebound to new instances since the last time this context's
* query was analyzed. (This assumes that each context is attached to at most one query.)
*/
private boolean modulesChanged = true;
>>>>>>>
<<<<<<<
=======
this.allModules = from.allModules;
this.mappedModules = from.mappedModules;
>>>>>>>
<<<<<<<
=======
if (dynamicOptions != null)
dynamicOptions.clear(); //clear any dynamic options
>>>>>>>
<<<<<<<
=======
/**
* @return iterator over all modules registered in the entire context tree
*/
public Iterator getRootModules() {
return getAllModules();
}
public Iterator getAllModules() {
return allModules.values().iterator();
}
>>>>>>>
<<<<<<<
/**
* For compiled expressions: check if the source of any
* module imported by the current query has changed since
* compilation.
*/
public boolean checkModulesValid() {
for(Iterator i = modules.values().iterator(); i.hasNext(); ) {
Module module = (Module)i.next();
if(!module.isInternalModule()) {
if(!((ExternalModule)module).moduleIsValid()) {
LOG.debug("Module with URI " + module.getNamespaceURI() +
" has changed and needs to be reloaded");
return false;
}
}
=======
public Module getRootModule(String namespaceURI) {
return (Module) allModules.get(namespaceURI);
}
public void setModule(String namespaceURI, Module module) {
if (module == null) {
modules.remove(namespaceURI); // unbind the module
} else {
modules.put(namespaceURI, module);
if (!module.isInternalModule()) {
((ModuleContext) ((ExternalModule) module).getContext()).setParentContext(this);
}
}
setRootModule(namespaceURI, module);
}
protected void setRootModule(String namespaceURI, Module module) {
if (module == null) {
allModules.remove(namespaceURI); // unbind the module
return;
}
if (allModules.get(namespaceURI) != module) setModulesChanged();
allModules.put(namespaceURI, module);
}
void setModulesChanged() {
this.modulesChanged = true;
}
public void analyzeAndOptimizeIfModulesChanged(PathExpr expr) throws XPathException {
if (modulesChanged) {
expr.analyze(new AnalyzeContextInfo());
if (optimizationsEnabled()) {
Optimizer optimizer = new Optimizer(this);
expr.accept(optimizer);
if (optimizer.hasOptimized()) {
reset(true);
expr.resetState(true);
expr.analyze(new AnalyzeContextInfo());
}
}
modulesChanged = false;
>>>>>>>
/**
* For compiled expressions: check if the source of any
* module imported by the current query has changed since
* compilation.
*/
public boolean checkModulesValid() {
for(Iterator i = modules.values().iterator(); i.hasNext(); ) {
Module module = (Module)i.next();
if(!module.isInternalModule()) {
if(!((ExternalModule)module).moduleIsValid()) {
LOG.debug("Module with URI " + module.getNamespaceURI() +
" has changed and needs to be reloaded");
return false;
}
}
<<<<<<<
public void importModule(String namespaceURI, String prefix, String location)
throws XPathException {
Module module = getModule(namespaceURI);
=======
public void importModule(String namespaceURI, String prefix, String location) throws XPathException {
Module module = getRootModule(namespaceURI);
>>>>>>>
public void importModule(String namespaceURI, String prefix, String location)
throws XPathException {
Module module = getModule(namespaceURI);
<<<<<<<
=======
private ExternalModule compileOrBorrowModule(String prefix, String namespaceURI, String location, Source source) throws XPathException {
ExternalModule module = broker.getBrokerPool().getXQueryPool().borrowModule(broker, source, this);
if (module == null) {
module = compileModule(prefix, namespaceURI, location, source);
} else {
for (Iterator it = module.getContext().getAllModules(); it.hasNext();) {
Module importedModule = (Module) it.next();
if (importedModule != null &&
!allModules.containsKey(importedModule.getNamespaceURI())) {
setRootModule(importedModule.getNamespaceURI(), importedModule);
}
}
}
setModule(module.getNamespaceURI(), module);
declareModuleVars(module);
return module;
}
>>>>>>> |
<<<<<<<
private final File archiveDir;
=======
>>>>>>>
private final File archiveDir;
<<<<<<<
this.archiveDir = archiveDir;
byteBuffer = allocateDirectAligned(RECORD_LENGTH, PAGE_SIZE);
unsafeBuffer = new UnsafeBuffer(byteBuffer);
recordingDescriptorEncoder.wrap(unsafeBuffer, CATALOG_FRAME_LENGTH);
final File catalogFile = new File(archiveDir, CATALOG_FILE_NAME);
=======
>>>>>>>
this.archiveDir = archiveDir;
recordingDescriptorEncoder.wrap(unsafeBuffer, CATALOG_FRAME_LENGTH);
final File catalogFile = new File(archiveDir, CATALOG_FILE_NAME); |
<<<<<<<
=======
public static final int LOG_BUFFER_SZ = LogBufferDescriptor.LOG_MIN_SIZE;
>>>>>>>
<<<<<<<
=======
private AtomicBuffer[] logBuffersSession1 = new AtomicBuffer[TermHelper.BUFFER_COUNT];
private AtomicBuffer[] logBuffersSession2 = new AtomicBuffer[TermHelper.BUFFER_COUNT];
private AtomicBuffer[] stateBuffersSession1 = new AtomicBuffer[TermHelper.BUFFER_COUNT];
private AtomicBuffer[] stateBuffersSession2 = new AtomicBuffer[TermHelper.BUFFER_COUNT];
private BufferLifecycleStrategy mockBufferUsage = mock(BufferLifecycleStrategy.class);
>>>>>>>
<<<<<<<
=======
for (int i = 0; i < TermHelper.BUFFER_COUNT; i++)
{
logBuffersSession1[i] = new AtomicBuffer(new byte[LOG_BUFFER_SZ]);
stateBuffersSession1[i] = new AtomicBuffer(new byte[STATE_BUFFER_LENGTH]);
logBuffersSession2[i] = new AtomicBuffer(new byte[LOG_BUFFER_SZ]);
stateBuffersSession2[i] = new AtomicBuffer(new byte[STATE_BUFFER_LENGTH]);
when(mockBufferUsage.newBuffer(eq(SESSION_ID_1 + "-log-" + i), anyInt(), anyInt()))
.thenReturn(logBuffersSession1[i]);
when(mockBufferUsage.newBuffer(eq(SESSION_ID_1 + "-state-" + i), anyInt(), anyInt()))
.thenReturn(stateBuffersSession1[i]);
when(mockBufferUsage.newBuffer(eq(SESSION_ID_2 + "-log-" + i), anyInt(), anyInt()))
.thenReturn(logBuffersSession2[i]);
when(mockBufferUsage.newBuffer(eq(SESSION_ID_2 + "-state-" + i), anyInt(), anyInt()))
.thenReturn(stateBuffersSession2[i]);
}
>>>>>>>
<<<<<<<
=======
private void signalWillTimeOut()
{
doAnswer(
(invocation) ->
{
Thread.sleep(AWAIT_TIMEOUT + 1);
return null;
}).when(signal).await(anyLong());
}
>>>>>>>
<<<<<<<
(invocation) ->
{
Thread.sleep(AWAIT_TIMEOUT + 1);
return null;
}).when(signal).await(anyLong());
}
private void willNotifyOperationSucceeded()
{
doAnswer(
(invocation) ->
{
conductor.operationSucceeded();
return null;
}).when(signal).await(anyLong());
}
private void willNotifyNewBuffer()
{
doAnswer(
invocation ->
{
sendNewBufferNotification(ON_NEW_PUBLICATION, SESSION_ID_1, TERM_ID_1);
conductor.doWork();
return null;
}).when(signal).await(anyLong());
=======
(invocation) ->
{
conductor.operationSucceeded();
return null;
}).when(signal).await(anyLong());
>>>>>>>
(invocation) ->
{
Thread.sleep(AWAIT_TIMEOUT + 1);
return null;
}).when(signal).await(anyLong());
}
private void willNotifyOperationSucceeded()
{
doAnswer(
(invocation) ->
{
conductor.operationSucceeded();
return null;
}).when(signal).await(anyLong());
}
private void willNotifyNewBuffer()
{
doAnswer(
invocation ->
{
sendNewBufferNotification(ON_NEW_PUBLICATION, SESSION_ID_1, TERM_ID_1);
conductor.doWork();
return null;
}).when(signal).await(anyLong()); |
<<<<<<<
* @return
* @throws XPathException
=======
* @throws XPathException
>>>>>>>
* @throws XPathException |
<<<<<<<
import static uk.co.real_logic.aeron.samples.SamplesUtil.printStringMessage;
import java.util.concurrent.atomic.AtomicBoolean;
=======
import uk.co.real_logic.agrona.CloseHelper;
import uk.co.real_logic.agrona.concurrent.SigInt;
>>>>>>>
import static uk.co.real_logic.aeron.samples.SamplesUtil.printStringMessage;
import java.util.concurrent.atomic.AtomicBoolean;
<<<<<<<
import uk.co.real_logic.aeron.common.concurrent.SigInt;
=======
>>>>>>> |
<<<<<<<
=======
public void testStrFunctions() {
try {
XMLResource resource = (XMLResource) testCollection.createResource("mondial-test.xml", "XMLResource");
resource.setContent(CITY);
testCollection.storeResource(resource);
XPathQueryService service = (XPathQueryService) testCollection.getService("XPathQueryService", "1.0");
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Berl')]", 1);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'erlin')]", 0);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'erlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'Ber')]", 0);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'erl', 'i')]", 2);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'Berlin', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'berlin', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'berlin')]", 0);
queryResource(service, "mondial-test.xml", "//city[matches(name, '^Berlin$')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'lin$', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, '.*lin$', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, '^lin$', 'i')]", 0);
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testIndexScan() {
try {
System.out.println("----- testIndexScan -----");
String queryBody =
"declare namespace f=\'http://exist-db.org/xquery/test\';\n" +
"declare namespace mods='http://www.loc.gov/mods/v3';\n" +
"import module namespace u=\'http://exist-db.org/xquery/util\';\n" +
"\n" +
"declare function f:term-callback($term as xs:string, $data as xs:int+)\n" +
"as element()+ {\n" +
" <item>\n" +
" <term>{$term}</term>\n" +
" <frequency>{$data[1]}</frequency>\n" +
" </item>\n" +
"};\n" +
"\n";
XPathQueryService service = storeXMLFileAndGetQueryService("items.xml", "src/org/exist/xquery/test/items.xml");
String query = queryBody + "u:index-keys(//item/name, \'\', util:function(\'f:term-callback\', 2), 1000)";
ResourceSet result = service.query(query);
for (ResourceIterator i = result.getIterator(); i.hasMoreResources(); ) {
System.out.println(i.nextResource().getContent());
}
assertEquals(7, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
}
>>>>>>>
public void testStrFunctions() {
try {
XMLResource resource = (XMLResource) testCollection.createResource("mondial-test.xml", "XMLResource");
resource.setContent(CITY);
testCollection.storeResource(resource);
XPathQueryService service = (XPathQueryService) testCollection.getService("XPathQueryService", "1.0");
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Berl')]", 1);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'erlin')]", 0);
queryResource(service, "mondial-test.xml", "//city[starts-with(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[contains(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'Berlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'erlin')]", 1);
queryResource(service, "mondial-test.xml", "//city[ends-with(name, 'Ber')]", 0);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'erl', 'i')]", 2);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'Erl')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'Berlin', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'berlin', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'berlin')]", 0);
queryResource(service, "mondial-test.xml", "//city[matches(name, '^Berlin$')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, 'lin$', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, '.*lin$', 'i')]", 1);
queryResource(service, "mondial-test.xml", "//city[matches(name, '^lin$', 'i')]", 0);
} catch (XMLDBException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
public void testIndexScan() {
try {
System.out.println("----- testIndexScan -----");
String queryBody =
"declare namespace f=\'http://exist-db.org/xquery/test\';\n" +
"declare namespace mods='http://www.loc.gov/mods/v3';\n" +
"import module namespace u=\'http://exist-db.org/xquery/util\';\n" +
"\n" +
"declare function f:term-callback($term as xs:string, $data as xs:int+)\n" +
"as element()+ {\n" +
" <item>\n" +
" <term>{$term}</term>\n" +
" <frequency>{$data[1]}</frequency>\n" +
" </item>\n" +
"};\n" +
"\n";
XPathQueryService service = storeXMLFileAndGetQueryService("items.xml", "src/org/exist/xquery/test/items.xml");
String query = queryBody + "u:index-keys(//item/name, \'\', util:function(\'f:term-callback\', 2), 1000)";
ResourceSet result = service.query(query);
for (ResourceIterator i = result.getIterator(); i.hasMoreResources(); ) {
System.out.println(i.nextResource().getContent());
}
assertEquals(7, result.getSize());
} catch (XMLDBException e) {
fail(e.getMessage());
}
} |
<<<<<<<
import static com.salesforce.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
=======
import static org.junit.Assert.assertTrue;
>>>>>>>
import static com.salesforce.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
<<<<<<<
import java.sql.SQLException;
=======
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
>>>>>>>
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
<<<<<<<
import com.salesforce.phoenix.schema.TableNotFoundException;
=======
import com.salesforce.phoenix.jdbc.PhoenixStatement;
import com.salesforce.phoenix.query.KeyRange;
>>>>>>>
import com.salesforce.phoenix.schema.TableNotFoundException;
import com.salesforce.phoenix.jdbc.PhoenixStatement;
import com.salesforce.phoenix.query.KeyRange; |
<<<<<<<
import com.salesforce.phoenix.expression.*;
import com.salesforce.phoenix.join.ScanProjector;
=======
import com.salesforce.phoenix.expression.ColumnExpression;
import com.salesforce.phoenix.expression.IndexKeyValueColumnExpression;
import com.salesforce.phoenix.expression.KeyValueColumnExpression;
import com.salesforce.phoenix.expression.RowKeyColumnExpression;
>>>>>>>
import com.salesforce.phoenix.expression.ColumnExpression;
import com.salesforce.phoenix.expression.IndexKeyValueColumnExpression;
import com.salesforce.phoenix.expression.KeyValueColumnExpression;
import com.salesforce.phoenix.expression.RowKeyColumnExpression;
import com.salesforce.phoenix.join.ScanProjector;
<<<<<<<
=======
} else {
if (tableRef.getTable().getType() == PTableType.INDEX) {
return new IndexKeyValueColumnExpression(getColumn());
} else {
return new KeyValueColumnExpression(getColumn());
}
>>>>>>>
}
if (tableRef.getTable().getType() == PTableType.INDEX) {
return new IndexKeyValueColumnExpression(getColumn()); |
<<<<<<<
import com.salesforce.phoenix.parse.AliasedNode;
=======
import com.salesforce.phoenix.optimize.QueryOptimizer;
>>>>>>>
import com.salesforce.phoenix.parse.AliasedNode;
import com.salesforce.phoenix.optimize.QueryOptimizer;
<<<<<<<
Scan scan = new Scan();
final StatementContext context = new StatementContext(upsert, connection, resolver, binds, scan);
=======
>>>>>>> |
<<<<<<<
import static com.salesforce.phoenix.schema.PDataType.CHAR;
import static com.salesforce.phoenix.schema.PDataType.VARCHAR;
import static com.salesforce.phoenix.schema.PTable.BASE_TABLE_PROP_NAME;
=======
import static com.salesforce.phoenix.query.QueryServices.DROP_METADATA_ATTRIB;
import static com.salesforce.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
>>>>>>>
import static com.salesforce.phoenix.query.QueryServices.DROP_METADATA_ATTRIB;
import static com.salesforce.phoenix.query.QueryServicesOptions.DEFAULT_DROP_METADATA;
import static com.salesforce.phoenix.schema.PDataType.CHAR;
import static com.salesforce.phoenix.schema.PDataType.VARCHAR;
import static com.salesforce.phoenix.schema.PTable.BASE_TABLE_PROP_NAME;
<<<<<<<
PreparedStatement tableUpsert = connection.prepareStatement(SchemaUtil.isMetaTable(schemaName, tableName) ? MUTATE_SYSTEM_TABLE : MUTATE_TABLE);
tableUpsert.setString(1, connection.getTenantId() == null ? null : connection.getTenantId().getString());
tableUpsert.setString(2, schemaName);
tableUpsert.setString(3, tableName);
tableUpsert.setString(4, table.getType().getSerializedValue());
tableUpsert.setLong(5, seqNum);
tableUpsert.setInt(6, totalColumnCount + columnCountDelta);
if (tableUpsert.getParameterMetaData().getParameterCount() > 6) {
tableUpsert.setBoolean(7, isImmutableRows);
=======
PreparedStatement tableUpsert = null;
try {
tableUpsert = connection.prepareStatement(SchemaUtil.isMetaTable(schemaName, tableName) ? MUTATE_SYSTEM_TABLE : MUTATE_TABLE);
tableUpsert.setString(1, schemaName);
tableUpsert.setString(2, tableName);
tableUpsert.setString(3, table.getType().getSerializedValue());
tableUpsert.setLong(4, seqNum);
tableUpsert.setInt(5, totalColumnCount + columnCountDelta);
if (tableUpsert.getParameterMetaData().getParameterCount() > 5) {
tableUpsert.setBoolean(6, isImmutableRows);
}
tableUpsert.execute();
} finally {
if(tableUpsert != null) {
tableUpsert.close();
}
>>>>>>>
PreparedStatement tableUpsert = null;
try {
tableUpsert = connection.prepareStatement(SchemaUtil.isMetaTable(schemaName, tableName) ? MUTATE_SYSTEM_TABLE : MUTATE_TABLE);
tableUpsert.setString(1, connection.getTenantId() == null ? null : connection.getTenantId().getString());
tableUpsert.setString(2, schemaName);
tableUpsert.setString(3, tableName);
tableUpsert.setString(4, table.getType().getSerializedValue());
tableUpsert.setLong(5, seqNum);
tableUpsert.setInt(6, totalColumnCount + columnCountDelta);
if (tableUpsert.getParameterMetaData().getParameterCount() > 6) {
tableUpsert.setBoolean(7, isImmutableRows);
}
tableUpsert.execute();
} finally {
if(tableUpsert != null) {
tableUpsert.close();
}
<<<<<<<
FromCompiler.getResolver(statement, connection);
PreparedStatement tableUpsert = connection.prepareStatement(UPDATE_INDEX_STATE);
tableUpsert.setString(1, connection.getTenantId() == null ? null : connection.getTenantId().getString());
tableUpsert.setString(2, schemaName);
tableUpsert.setString(3, indexName);
tableUpsert.setString(4, statement.getIndexState().getSerializedValue());
tableUpsert.execute();
=======
TableRef indexRef = FromCompiler.getResolver(statement, connection).getTables().get(0);
PreparedStatement tableUpsert = null;
try {
tableUpsert = connection.prepareStatement(UPDATE_INDEX_STATE);
tableUpsert.setString(1, schemaName);
tableUpsert.setString(2, indexName);
tableUpsert.setString(3, newIndexState.getSerializedValue());
tableUpsert.execute();
} finally {
if(tableUpsert != null) {
tableUpsert.close();
}
}
>>>>>>>
TableRef indexRef = FromCompiler.getResolver(statement, connection).getTables().get(0);
PreparedStatement tableUpsert = null;
try {
tableUpsert = connection.prepareStatement(UPDATE_INDEX_STATE);
tableUpsert.setString(1, connection.getTenantId() == null ? null : connection.getTenantId().getString());
tableUpsert.setString(2, schemaName);
tableUpsert.setString(3, indexName);
tableUpsert.setString(4, newIndexState.getSerializedValue());
tableUpsert.execute();
} finally {
if(tableUpsert != null) {
tableUpsert.close();
}
} |
<<<<<<<
result = doDropTable(key, tenantIdBytes, schemaName, tableName, PTableType.fromSerializedValue(tableType), tableMetadata, invalidateList, lids);
=======
result = doDropTable(key, schemaName, tableName, PTableType.fromSerializedValue(tableType), tableMetadata, invalidateList, lids, tableNamesToDelete);
>>>>>>>
result = doDropTable(key, tenantIdBytes, schemaName, tableName, PTableType.fromSerializedValue(tableType), tableMetadata, invalidateList, lids, tableNamesToDelete);
<<<<<<<
private MetaDataMutationResult doDropTable(byte[] key, byte[] tenantId, byte[] schemaName, byte[] tableName, PTableType tableType,
List<Mutation> rowsToDelete, List<ImmutableBytesPtr> invalidateList, List<Integer> lids) throws IOException, SQLException {
=======
private MetaDataMutationResult doDropTable(byte[] key, byte[] schemaName, byte[] tableName, PTableType tableType,
List<Mutation> rowsToDelete, List<ImmutableBytesPtr> invalidateList, List<Integer> lids, List<byte[]> tableNamesToDelete) throws IOException, SQLException {
>>>>>>>
private MetaDataMutationResult doDropTable(byte[] key, byte[] tenantId, byte[] schemaName, byte[] tableName, PTableType tableType,
List<Mutation> rowsToDelete, List<ImmutableBytesPtr> invalidateList, List<Integer> lids, List<byte[]> tableNamesToDelete) throws IOException, SQLException {
<<<<<<<
MetaDataMutationResult result = doDropTable(indexKey, tenantId, schemaName, indexName, PTableType.INDEX, rowsToDelete, invalidateList, lids);
=======
MetaDataMutationResult result = doDropTable(indexKey, schemaName, indexName, PTableType.INDEX, rowsToDelete, invalidateList, lids, tableNamesToDelete);
>>>>>>>
MetaDataMutationResult result = doDropTable(indexKey, tenantId, schemaName, indexName, PTableType.INDEX, rowsToDelete, invalidateList, lids, tableNamesToDelete);
<<<<<<<
doDropTable(indexKey, tenantId, index.getSchemaName().getBytes(), index.getTableName().getBytes(), index.getType(), additionalTableMetaData, invalidateList, lids);
=======
doDropTable(indexKey, index.getSchemaName().getBytes(), index.getTableName().getBytes(), index.getType(), additionalTableMetaData, invalidateList, lids, tableNamesToDelete);
>>>>>>>
doDropTable(indexKey, tenantId, index.getSchemaName().getBytes(), index.getTableName().getBytes(), index.getType(), additionalTableMetaData, invalidateList, lids, tableNamesToDelete); |
<<<<<<<
import com.salesforce.phoenix.join.HashCacheClient;
import com.salesforce.phoenix.parse.HintNode;
import com.salesforce.phoenix.parse.HintNode.Hint;
=======
import com.salesforce.phoenix.parse.BindableStatement;
>>>>>>>
import com.salesforce.phoenix.join.HashCacheClient;
import com.salesforce.phoenix.parse.BindableStatement;
<<<<<<<
private final HintNode hintNode;
private final HashCacheClient hashClient;
private boolean isAggregate;
=======
>>>>>>>
<<<<<<<
public StatementContext(PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, int bindCount, Scan scan) {
this(connection, resolver, binds, bindCount, scan, null);
}
public StatementContext(PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, int bindCount, Scan scan, HintNode hintNode) {
this(connection, resolver, binds, bindCount, scan, hintNode, null);
}
public StatementContext(PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, int bindCount, Scan scan, HintNode hintNode, HashCacheClient hashClient) {
=======
public StatementContext(BindableStatement statement, PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, Scan scan) {
>>>>>>>
private final HashCacheClient hashClient;
public StatementContext(BindableStatement statement, PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, Scan scan) {
this(statement, connection, resolver, binds, scan, null);
}
public StatementContext(BindableStatement statement, PhoenixConnection connection, ColumnResolver resolver, List<Object> binds, Scan scan, HashCacheClient hashClient) {
<<<<<<<
this.hintNode = hintNode;
this.hashClient = hashClient;
}
public boolean hasHint(Hint hint) {
return hintNode == null ? false : hintNode.hasHint(hint);
}
public String getHint(Hint hint) {
return hintNode == null ? null : hintNode.getHint(hint);
=======
>>>>>>>
this.hashClient = hashClient; |
<<<<<<<
import com.salesforce.phoenix.execute.*;
=======
import com.salesforce.phoenix.execute.AggregatePlan;
import com.salesforce.phoenix.execute.BasicQueryPlan;
import com.salesforce.phoenix.execute.HashJoinPlan;
import com.salesforce.phoenix.execute.ScanPlan;
>>>>>>>
import com.salesforce.phoenix.execute.*;
<<<<<<<
import com.salesforce.phoenix.parse.ParseNode;
import com.salesforce.phoenix.parse.SelectStatement;
=======
import com.salesforce.phoenix.join.HashCacheClient;
import com.salesforce.phoenix.join.HashJoinInfo;
import com.salesforce.phoenix.parse.*;
import com.salesforce.phoenix.parse.JoinTableNode.JoinType;
>>>>>>>
import com.salesforce.phoenix.parse.ParseNode;
import com.salesforce.phoenix.parse.SelectStatement;
import com.salesforce.phoenix.join.HashCacheClient;
import com.salesforce.phoenix.join.HashJoinInfo;
import com.salesforce.phoenix.parse.*;
import com.salesforce.phoenix.parse.JoinTableNode.JoinType;
<<<<<<<
ColumnResolver resolver = FromCompiler.getResolver(statement, connection);
TableRef tableRef = resolver.getTables().get(0);
PTable table = tableRef.getTable();
StatementContext context = new StatementContext(connection, resolver, binds, statement.getBindCount(), scan, statement.getHint());
if (table.getType() == PTableType.INDEX && table.getIndexState() != PIndexState.ACTIVE) {
return new DegenerateQueryPlan(context, tableRef);
}
=======
List<TableNode> fromNodes = statement.getFrom();
if (fromNodes.size() == 1) {
ColumnResolver resolver = FromCompiler.getResolver(statement, connection);
StatementContext context = new StatementContext(connection, resolver, binds, statement.getBindCount(), scan, statement.getHint());
return compile(context, statement, binds);
}
JoinedColumnResolver resolver = JoinCompiler.getResolver(statement, connection);
StatementContext context = new StatementContext(connection, resolver, binds, statement.getBindCount(), scan, statement.getHint(), new HashCacheClient(connection.getQueryServices(), connection.getTenantId()));
return compile(context, statement, binds);
}
@SuppressWarnings("unchecked")
protected QueryPlan compile(StatementContext context, SelectStatement statement, List<Object> binds, JoinSpec join) throws SQLException {
StarJoinType starJoin = JoinCompiler.getStarJoinType(join);
if (starJoin == StarJoinType.BASIC || starJoin == StarJoinType.EXTENDED) {
List<JoinTable> joinTables = join.getJoinTables();
int count = joinTables.size();
ImmutableBytesWritable[] joinIds = new ImmutableBytesWritable[count];
List<Expression>[] joinExpressions = (List<Expression>[]) new List[count];
List<Expression>[] hashExpressions = (List<Expression>[]) new List[count];
JoinType[] joinTypes = new JoinType[count];
QueryPlan[] joinPlans = new QueryPlan[count];
for (int i = 0; i < count; i++) {
joinIds[i] = new ImmutableBytesWritable(HashCacheClient.nextJoinId());
Pair<List<Expression>, List<Expression>> splittedExpressions = JoinCompiler.splitEquiJoinConditions(joinTables.get(i));
joinExpressions[i] = splittedExpressions.getFirst();
hashExpressions[i] = splittedExpressions.getSecond();
joinTypes[i] = joinTables.get(i).getType();
}
HashJoinInfo joinInfo = new HashJoinInfo(joinIds, joinExpressions, joinTypes);
HashJoinInfo.serializeHashJoinIntoScan(context.getScan(), joinInfo);
BasicQueryPlan plan = compile(context, JoinCompiler.newSelectWithoutJoin(statement), binds);
return new HashJoinPlan(plan, joinIds, hashExpressions, joinPlans);
}
return null;
}
protected BasicQueryPlan compile(StatementContext context, SelectStatement statement, List<Object> binds) throws SQLException{
ColumnResolver resolver = context.getResolver();
>>>>>>>
List<TableNode> fromNodes = statement.getFrom();
if (fromNodes.size() == 1) {
ColumnResolver resolver = FromCompiler.getResolver(statement, connection);
TableRef tableRef = resolver.getTables().get(0);
PTable table = tableRef.getTable();
StatementContext context = new StatementContext(connection, resolver, binds, statement.getBindCount(), scan, statement.getHint());
if (table.getType() == PTableType.INDEX && table.getIndexState() != PIndexState.ACTIVE) {
return new DegenerateQueryPlan(context, tableRef);
}
return compile(context, statement, binds);
}
JoinedColumnResolver resolver = JoinCompiler.getResolver(statement, connection);
StatementContext context = new StatementContext(connection, resolver, binds, statement.getBindCount(), scan, statement.getHint(), new HashCacheClient(connection.getQueryServices(), connection.getTenantId()));
return compile(context, statement, binds);
}
@SuppressWarnings("unchecked")
protected QueryPlan compile(StatementContext context, SelectStatement statement, List<Object> binds, JoinSpec join) throws SQLException {
StarJoinType starJoin = JoinCompiler.getStarJoinType(join);
if (starJoin == StarJoinType.BASIC || starJoin == StarJoinType.EXTENDED) {
List<JoinTable> joinTables = join.getJoinTables();
int count = joinTables.size();
ImmutableBytesWritable[] joinIds = new ImmutableBytesWritable[count];
List<Expression>[] joinExpressions = (List<Expression>[]) new List[count];
List<Expression>[] hashExpressions = (List<Expression>[]) new List[count];
JoinType[] joinTypes = new JoinType[count];
QueryPlan[] joinPlans = new QueryPlan[count];
for (int i = 0; i < count; i++) {
joinIds[i] = new ImmutableBytesWritable(HashCacheClient.nextJoinId());
Pair<List<Expression>, List<Expression>> splittedExpressions = JoinCompiler.splitEquiJoinConditions(joinTables.get(i));
joinExpressions[i] = splittedExpressions.getFirst();
hashExpressions[i] = splittedExpressions.getSecond();
joinTypes[i] = joinTables.get(i).getType();
}
HashJoinInfo joinInfo = new HashJoinInfo(joinIds, joinExpressions, joinTypes);
HashJoinInfo.serializeHashJoinIntoScan(context.getScan(), joinInfo);
BasicQueryPlan plan = compile(context, JoinCompiler.newSelectWithoutJoin(statement), binds);
return new HashJoinPlan(plan, joinIds, hashExpressions, joinPlans);
}
return null;
}
protected BasicQueryPlan compile(StatementContext context, SelectStatement statement, List<Object> binds) throws SQLException{
ColumnResolver resolver = context.getResolver();
TableRef tableRef = resolver.getTables().get(0); |
<<<<<<<
int ordinalPosition = theTable.getColumns().size();
List<PColumn> dynamicColumns = new ArrayList<PColumn>();
dynamicColumns.addAll(theTable.getColumns());
for(ColumnDef cdef:dynamicColumnDefs){
dynamicColumns.add(client.newColumn(ordinalPosition, cdef, null));
ordinalPosition++;
}
//redeclare the new tableImpl with the dynamicColumnDefs
theTable = new PTableImpl(theTable.getName(), theTable.getType(), theTable.getTimeStamp(),theTable.getSequenceNumber(), theTable.getPKName(), dynamicColumns);
=======
int ordinalPosition = theTable.getColumns().size();
List<PColumn> dynamicColumns = new ArrayList<PColumn>();
dynamicColumns.addAll(theTable.getColumns());
for(ColumnDef cdef:dynamicColumnDefs){
dynamicColumns.add(client.newColumn(ordinalPosition, cdef, Collections.<String>emptySet()));
ordinalPosition++;
}
//redeclare the new tableImpl with the dynamicColumnDefs
theTable = new PTableImpl(theTable.getName(), theTable.getType(), theTable.getTimeStamp(),
theTable.getSequenceNumber(), theTable.getPKName(), theTable.getBucketNum(), dynamicColumns);
>>>>>>>
int ordinalPosition = theTable.getColumns().size();
List<PColumn> dynamicColumns = new ArrayList<PColumn>();
dynamicColumns.addAll(theTable.getColumns());
for(ColumnDef cdef:dynamicColumnDefs){
dynamicColumns.add(client.newColumn(ordinalPosition, cdef, null));
ordinalPosition++;
}
//redeclare the new tableImpl with the dynamicColumnDefs
theTable = new PTableImpl(theTable.getName(), theTable.getType(), theTable.getTimeStamp(),
theTable.getSequenceNumber(), theTable.getPKName(), theTable.getBucketNum(), dynamicColumns); |
<<<<<<<
@Test
public void testRowValueConstructorQuery() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select a_integer FROM aTable where (x_integer, y_integer) > (3, 4)"));
parser.parseStatement();
}
=======
@Test
public void testSingleTopLevelNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select * from t where not c = 5"));
parser.parseStatement();
}
@Test
public void testTopLevelNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select * from t where not c"));
parser.parseStatement();
}
@Test
public void testHavingWithNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select\n" +
"\"WEB_STAT_ALIAS\".\"DOMAIN\" as \"c0\"\n" +
"from \"WEB_STAT\" \"WEB_STAT_ALIAS\"\n" +
"group by \"WEB_STAT_ALIAS\".\"DOMAIN\" having\n" +
"(\n" +
"(\n" +
"NOT\n" +
"(\n" +
"(sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null)\n" +
")\n" +
"OR NOT((sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null))\n" +
")\n" +
"OR NOT((sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null))\n" +
")\n" +
"order by CASE WHEN \"WEB_STAT_ALIAS\".\"DOMAIN\" IS NULL THEN 1 ELSE 0 END,\n" +
"\"WEB_STAT_ALIAS\".\"DOMAIN\" ASC"));
parser.parseStatement();
}
>>>>>>>
@Test
public void testRowValueConstructorQuery() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select a_integer FROM aTable where (x_integer, y_integer) > (3, 4)"));
parser.parseStatement();
}
@Test
public void testSingleTopLevelNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select * from t where not c = 5"));
parser.parseStatement();
}
@Test
public void testTopLevelNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select * from t where not c"));
parser.parseStatement();
}
@Test
public void testHavingWithNot() throws Exception {
SQLParser parser = new SQLParser(
new StringReader(
"select\n" +
"\"WEB_STAT_ALIAS\".\"DOMAIN\" as \"c0\"\n" +
"from \"WEB_STAT\" \"WEB_STAT_ALIAS\"\n" +
"group by \"WEB_STAT_ALIAS\".\"DOMAIN\" having\n" +
"(\n" +
"(\n" +
"NOT\n" +
"(\n" +
"(sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null)\n" +
")\n" +
"OR NOT((sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null))\n" +
")\n" +
"OR NOT((sum(\"WEB_STAT_ALIAS\".\"ACTIVE_VISITOR\") is null))\n" +
")\n" +
"order by CASE WHEN \"WEB_STAT_ALIAS\".\"DOMAIN\" IS NULL THEN 1 ELSE 0 END,\n" +
"\"WEB_STAT_ALIAS\".\"DOMAIN\" ASC"));
parser.parseStatement();
} |
<<<<<<<
public QueryResponse<CiData> queryCiData(Integer ciTypeId, QueryRequest queryObject) {
=======
public QueryResponse<?> queryCiData(Integer ciTypeId, QueryRequest queryObject) {
if (queryObject == null) {
queryObject = QueryRequest.defaultQueryObject().descendingSortBy(CmdbConstants.DEFAULT_FIELD_CREATED_DATE);
} else if (queryObject.getSorting() == null || queryObject.getSorting().getField() == null) {
queryObject.setSorting(new Sorting(false, CmdbConstants.DEFAULT_FIELD_CREATED_DATE));
}
>>>>>>>
public QueryResponse<CiData> queryCiData(Integer ciTypeId, QueryRequest queryObject) {
if (queryObject == null) {
queryObject = QueryRequest.defaultQueryObject().descendingSortBy(CmdbConstants.DEFAULT_FIELD_CREATED_DATE);
} else if (queryObject.getSorting() == null || queryObject.getSorting().getField() == null) {
queryObject.setSorting(new Sorting(false, CmdbConstants.DEFAULT_FIELD_CREATED_DATE));
} |
<<<<<<<
if (!ciService.queryWithFilters(ciTypeId, Lists.newArrayList(new Filter(attr.getPropertyName(), FilterOperator.Equal.getCode(), val))).isEmpty()) {
throw new InvalidArgumentException(String.format("The given attribute [properyName:%s] val [%s] is not unique.", attr.getPropertyName(), String.valueOf(val)))
.withErrorCode("3242", attr.getPropertyName(), String.valueOf(val));
=======
if (!ciService.queryWithFilters(ciTypeId,
Lists.newArrayList(new Filter(attr.getPropertyName(), FilterOperator.Equal.getCode(), val)),
Lists.newArrayList(GUID)).isEmpty()) {
throw new InvalidArgumentException(String.format("The given attribute [properyName:%s] val [%s] is not unique.", attr.getPropertyName(), String.valueOf(val)));
>>>>>>>
if (!ciService.queryWithFilters(ciTypeId,
Lists.newArrayList(new Filter(attr.getPropertyName(), FilterOperator.Equal.getCode(), val)),
Lists.newArrayList(GUID)).isEmpty()) {
throw new InvalidArgumentException(String.format("The given attribute [properyName:%s] val [%s] is not unique.", attr.getPropertyName(), String.valueOf(val)))
.withErrorCode("3242", attr.getPropertyName(), String.valueOf(val));
<<<<<<<
if (ciService.queryWithFilters(refCiTypeId, Lists.newArrayList(new Filter("guid", FilterOperator.Equal.getCode(), guid))).isEmpty()) {
String ciTypeName = getCiTypeName(refCiTypeId);
throw new InvalidArgumentException(String.format("The given guid [%s] can not be found for CiType [%s(%d)]", guid, ciTypeName, refCiTypeId))
.withErrorCode("3243", guid, ciTypeName, refCiTypeId);
=======
if (ciService.queryWithFilters(refCiTypeId,
Lists.newArrayList(new Filter(GUID, FilterOperator.Equal.getCode(), guid)),
Lists.newArrayList(GUID)).isEmpty()) {
throw new InvalidArgumentException(String.format("The given guid [%s] can not be found for CiType [%s(%d)]", guid, getCiTypeName(refCiTypeId), refCiTypeId));
>>>>>>>
if (ciService.queryWithFilters(refCiTypeId,
Lists.newArrayList(new Filter(GUID, FilterOperator.Equal.getCode(), guid)),
Lists.newArrayList(GUID)).isEmpty()) {
String ciTypeName = getCiTypeName(refCiTypeId);
throw new InvalidArgumentException(String.format("The given guid [%s] can not be found for CiType [%s(%d)]", guid, ciTypeName, refCiTypeId))
.withErrorCode("3243", guid, ciTypeName, refCiTypeId); |
<<<<<<<
String username = getCurrentUsername();
List<AdmRole> roles = getRoles();
if (isEmpty(roles))
throw new CmdbAccessDeniedException("No role found for user: " + username).withErrorCode("3123", username);
=======
UserCiTypeAuthority result = (UserCiTypeAuthority)CacheUtils.cacheLocaleCall(requestScopedCacheManager,"authorizationService-getUserAuthority",ciTypeId,()->{
String username = getCurrentUsername();
List<AdmRole> roles = getRoles();
if (isEmpty(roles))
throw new CmdbAccessDeniedException("No role found for user: " + username);
List<Integer> roleIds = roles.stream().map(AdmRole::getIdAdmRole).collect(Collectors.toList());
List<AdmRoleCiType> roleCiTypes = roleCiTypeRepository.findAdmRoleCiTypesByCiTypeIdAndRoleIds(ciTypeId, roleIds);
>>>>>>>
UserCiTypeAuthority result = (UserCiTypeAuthority)CacheUtils.cacheLocaleCall(requestScopedCacheManager,"authorizationService-getUserAuthority",ciTypeId,()->{
String username = getCurrentUsername();
List<AdmRole> roles = getRoles();
if (isEmpty(roles))
throw new CmdbAccessDeniedException("No role found for user: " + username).withErrorCode("3123", username);
List<Integer> roleIds = roles.stream().map(AdmRole::getIdAdmRole).collect(Collectors.toList());
List<AdmRoleCiType> roleCiTypes = roleCiTypeRepository.findAdmRoleCiTypesByCiTypeIdAndRoleIds(ciTypeId, roleIds); |