conflict_resolution (string, lengths 27 to 16k)
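Each row below appears to be one flattened sample following git's conflict-marker layout: the text between <<<<<<< and ======= is one side of the merge conflict ("ours", which may be empty), the text between ======= and >>>>>>> is the other side ("theirs"), and everything after >>>>>>> is the committed resolution. As a minimal hypothetical illustration of that layout (the Java snippet is invented for this note, not drawn from the data), a row resolving a one-line conflict in favor of "theirs" would read:

<<<<<<< int limit = 10; ======= int limit = 20; >>>>>>> int limit = 20;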
<<<<<<< ======= if (level == 0) { pipelineAggregatorFactory .validate(null, factories.getAggregatorFactories(), factories.getPipelineAggregatorFactories()); } if (metaData != null) { pipelineAggregatorFactory.setMetaData(metaData); } >>>>>>> if (metaData != null) { pipelineAggregatorFactory.setMetaData(metaData); }
<<<<<<< ======= import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; >>>>>>> import org.elasticsearch.common.ParsingException; <<<<<<< public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException { ======= public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { >>>>>>> public BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException {
<<<<<<< URLRepository.SUPPORTED_PROTOCOLS_SETTING, Node.NODE_INGEST_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING))); ======= URLRepository.SUPPORTED_PROTOCOLS_SETTING))); >>>>>>> URLRepository.SUPPORTED_PROTOCOLS_SETTING, ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING, EsExecutors.PROCESSORS_SETTING)));
<<<<<<< public class AuthEventHandler ======= import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.eventhandler.EventPriority; import cpw.mods.fml.common.eventhandler.SubscribeEvent; import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedInEvent; import cpw.mods.fml.common.gameevent.PlayerEvent.PlayerLoggedOutEvent; public class AuthEventHandler extends ServerEventHandler >>>>>>> public class AuthEventHandler extends ServerEventHandler
<<<<<<< import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ReplicationResponse; ======= >>>>>>> import org.elasticsearch.action.ActionResponse; <<<<<<< try (ShardReference replica = getReplicaShardReference(request.shardId(), request.primaryTerm())) { shardOperationOnReplica(request); response = new ReplicaResponse(replica.routingEntry().allocationId().getId(), replica.getLocalCheckpoint()); ======= ReplicaResult result; try (Releasable ignored = acquireReplicaOperationLock(request.shardId(), request.primaryTerm())) { result = shardOperationOnReplica(request); } result.respond(new ResponseListener()); } /** * Listens for the response on the replica and sends the response back to the primary. */ private class ResponseListener implements ActionListener<TransportResponse.Empty> { @Override public void onResponse(Empty response) { >>>>>>> ReplicaResult result; try (ShardReference replica = getReplicaShardReference(request.shardId(), request.primaryTerm())) { result = shardOperationOnReplica(request); response = new ReplicaResponse(replica.routingEntry().allocationId().getId(), replica.getLocalCheckpoint()); } result.respond(new ResponseListener(response)); } /** * Listens for the response on the replica and sends the response back to the primary. */ private class ResponseListener implements ActionListener<TransportResponse.Empty> { private final ReplicaResponse replicaResponse; public ResponseListener(ReplicaResponse replicaResponse) { this.replicaResponse = replicaResponse; } @Override public void onResponse(Empty response) { <<<<<<< setPhase(task, "finished"); channel.sendResponse(response); ======= @Override public void onFailure(Throwable e) { responseWithFailure(e); } >>>>>>> @Override public void onFailure(Throwable e) { responseWithFailure(e); } <<<<<<< class ShardReference implements Releasable { ======= class PrimaryShardReference implements ReplicationOperation.Primary<Request, ReplicaRequest, PrimaryResult>, Releasable { >>>>>>> class ShardReference implements Releasable {
<<<<<<< assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder() .put(indexSettings()).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 10, TimeUnit.SECONDS))); ======= assertAcked(prepareCreate("test-idx", 2, Settings.builder() .put(indexSettings()).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 10))); >>>>>>> assertAcked(prepareCreate("test-idx", 2, Settings.builder() .put(indexSettings()).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 10, TimeUnit.SECONDS))); <<<<<<< assertAcked(prepareCreate("test-idx", 2, ImmutableSettings.builder() .put(SETTING_NUMBER_OF_SHARDS, numShards.numPrimaries).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 5, TimeUnit.SECONDS))); ======= assertAcked(prepareCreate("test-idx", 2, Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, numShards.numPrimaries).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 5))); >>>>>>> assertAcked(prepareCreate("test-idx", 2, Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, numShards.numPrimaries).put(SETTING_NUMBER_OF_REPLICAS, between(0, 1)).put("refresh_interval", 5, TimeUnit.SECONDS)));
<<<<<<< ======= import net.minecraft.entity.player.EntityPlayer; import cpw.mods.fml.common.ObfuscationReflectionHelper; >>>>>>> import net.minecraft.entity.player.EntityPlayer; import cpw.mods.fml.common.ObfuscationReflectionHelper;
<<<<<<< ======= import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.RandomAccessWeight; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; >>>>>>> <<<<<<< import org.elasticsearch.script.Script; ======= import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; >>>>>>> import org.elasticsearch.script.Script; <<<<<<< import org.elasticsearch.script.ScriptParameterParser; ======= import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptParameterParser; >>>>>>> import org.elasticsearch.script.ScriptParameterParser; <<<<<<< ======= import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; >>>>>>> import org.elasticsearch.script.SearchScript; <<<<<<< @Override public ScriptQueryBuilder getBuilderPrototype() { return ScriptQueryBuilder.PROTOTYPE; ======= static class ScriptQuery extends Query { private final Script script; private final SearchScript searchScript; public ScriptQuery(Script script, ScriptService scriptService, SearchLookup searchLookup) { this.script = script; this.searchScript = scriptService.search(searchLookup, script, ScriptContext.Standard.SEARCH); } @Override public String toString(String field) { StringBuilder buffer = new StringBuilder(); buffer.append("ScriptFilter("); buffer.append(script); buffer.append(")"); return buffer.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (!super.equals(obj)) return false; ScriptQuery other = (ScriptQuery) obj; return Objects.equals(script, other.script); } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = prime * result + Objects.hashCode(script); return result; } @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new RandomAccessWeight(this) { @Override protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException { final LeafSearchScript leafScript = searchScript.getLeafSearchScript(context); return new Bits() { @Override public boolean get(int doc) { leafScript.setDocument(doc); Object val = leafScript.run(); if (val == null) { return false; } if (val instanceof Boolean) { return (Boolean) val; } if (val instanceof Number) { return ((Number) val).longValue() != 0; } throw new IllegalArgumentException("Can't handle type [" + val + "] in script filter"); } @Override public int length() { return context.reader().maxDoc(); } }; } }; } >>>>>>> @Override public ScriptQueryBuilder getBuilderPrototype() { return ScriptQueryBuilder.PROTOTYPE;
<<<<<<< int numShards = 1 + randomInt(3); clusterService.setState(stateWithAssignedPrimariesAndOneReplica(index, numShards)); ======= int numShards = randomInt(3); setState(clusterService, stateWithAssignedPrimariesAndOneReplica(index, numShards)); >>>>>>> int numShards = 1 + randomInt(3); setState(clusterService, stateWithAssignedPrimariesAndOneReplica(index, numShards));
<<<<<<< import org.elasticsearch.action.delete.DeleteRequest; ======= import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; >>>>>>> import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest;
<<<<<<< final Type promote = AnalyzerCaster.promoteNumeric(child.actual, false, true); ======= final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, false); >>>>>>> final Type promote = AnalyzerCaster.promoteNumeric(child.actual, false); <<<<<<< final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true, true); ======= final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true); >>>>>>> final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); <<<<<<< final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true, true); ======= final Type promote = AnalyzerCaster.promoteNumeric(definition, child.actual, true); >>>>>>> final Type promote = AnalyzerCaster.promoteNumeric(child.actual, true); <<<<<<< adapter.invokeStatic(Definition.DEF_UTIL_TYPE.type, DEF_NOT_CALL); ======= adapter.invokeStatic(DEF_TYPE, DEF_NOT_CALL); >>>>>>> adapter.invokeStatic(DEF_UTIL_TYPE, DEF_NOT_CALL); <<<<<<< adapter.invokeStatic(Definition.DEF_UTIL_TYPE.type, DEF_NEG_CALL); ======= adapter.invokeStatic(DEF_TYPE, DEF_NEG_CALL); >>>>>>> adapter.invokeStatic(DEF_UTIL_TYPE, DEF_NEG_CALL);
<<<<<<< import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; ======= >>>>>>> import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; <<<<<<< import java.util.stream.Collectors; ======= >>>>>>> import java.util.function.Consumer; <<<<<<< RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider, GlobalCheckpointSyncAction globalCheckpointSyncAction) { ======= RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider) { this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService, clusterService, threadPool, recoveryTargetService, shardStateAction, nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, recoverySource, nodeServicesProvider); } // for tests IndicesClusterStateService(Settings settings, AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>> indicesService, ClusterService clusterService, ThreadPool threadPool, RecoveryTargetService recoveryTargetService, ShardStateAction shardStateAction, NodeMappingRefreshAction nodeMappingRefreshAction, RepositoriesService repositoriesService, RestoreService restoreService, SearchService searchService, SyncedFlushService syncedFlushService, RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider) { >>>>>>> RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider, GlobalCheckpointSyncAction globalCheckpointSyncAction) { this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService, clusterService, threadPool, recoveryTargetService, shardStateAction, nodeMappingRefreshAction, repositoriesService, restoreService, searchService, syncedFlushService, recoverySource, nodeServicesProvider, globalCheckpointSyncAction); } // for tests IndicesClusterStateService(Settings settings, AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>> indicesService, ClusterService clusterService, ThreadPool threadPool, RecoveryTargetService recoveryTargetService, ShardStateAction shardStateAction, NodeMappingRefreshAction nodeMappingRefreshAction, RepositoriesService repositoriesService, RestoreService restoreService, SearchService searchService, SyncedFlushService syncedFlushService, RecoverySource recoverySource, NodeServicesProvider nodeServicesProvider, GlobalCheckpointSyncAction globalCheckpointSyncAction) { <<<<<<< for (ShardRouting shard : routingNode) { if (!indicesService.hasIndex(shard.index())) { final IndexMetaData indexMetaData = event.state().metaData().getIndexSafe(shard.index()); if (logger.isDebugEnabled()) { logger.debug("[{}] creating index", indexMetaData.getIndex()); } try { indicesService.createIndex(nodeServicesProvider, indexMetaData, buildInIndexListener, globalCheckpointSyncAction::updateCheckpointForShard); } catch (Throwable e) { sendFailShard(shard, "failed to create index", e); ======= // create map of indices to create with shards to fail if index creation fails final Map<Index, List<ShardRouting>> indicesToCreate = new HashMap<>(); for (ShardRouting shardRouting : localRoutingNode) { if (failedShardsCache.containsKey(shardRouting.shardId()) == false) { final Index index = shardRouting.index(); if (indicesService.indexService(index) == null) { indicesToCreate.computeIfAbsent(index, k -> new ArrayList<>()).add(shardRouting); >>>>>>> // create map of indices to create with shards to fail if index creation fails final Map<Index, List<ShardRouting>> indicesToCreate = new HashMap<>(); for (ShardRouting shardRouting : localRoutingNode) { if (failedShardsCache.containsKey(shardRouting.shardId()) == false) { final Index index = shardRouting.index(); if (indicesService.indexService(index) == null) { indicesToCreate.computeIfAbsent(index, k -> new ArrayList<>()).add(shardRouting); <<<<<<< if (shardHasBeenRemoved == false) { try { indexShard.updateRoutingEntry(shardRouting, event.state().blocks().disableStatePersistence() == false); if (shardRouting.primary()) { final IndexShardRoutingTable shardRoutingTable = routingTable.shardRoutingTable(shardRouting.shardId()); Set<String> activeIds = shardRoutingTable.activeShards().stream().map(sr -> sr.allocationId().getId()).collect(Collectors.toSet()); Set<String> initializingIds = shardRoutingTable.getAllInitializingShards().stream().map(sr -> sr.allocationId().getId()).collect(Collectors.toSet()); indexShard.updateAllocationIdsFromMaster(activeIds, initializingIds); } } catch (Throwable e) { failAndRemoveShard(shardRouting, indexService, true, "failed updating shard routing entry", e); } } } if (shardRouting.initializing()) { applyInitializingShard(event.state(), indexService, shardRouting); ======= >>>>>>>
<<<<<<< ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= final Index index = new Index("test", "_na_"); ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> final Index index = new Index("test", "_na_"); ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<< import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.SearchSlowLog; ======= >>>>>>> import org.elasticsearch.index.seqno.SeqNoStats;
<<<<<<< import org.elasticsearch.core.internal.io.IOUtils; ======= >>>>>>> import org.elasticsearch.core.internal.io.IOUtils; <<<<<<< protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { assertVisibleCount(engine, numDocs, true); } protected static void assertVisibleCount(InternalEngine engine, int numDocs, boolean refresh) throws IOException { if (refresh) { engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new MatchAllDocsQuery(), collector); assertThat(collector.getTotalHits(), equalTo(numDocs)); } } public static List<Engine.Operation> generateSingleDocHistory( final boolean forReplica, final VersionType versionType, final boolean partialOldPrimary, final long primaryTerm, final int minOpCount, final int maxOpCount) { final int numOfOps = randomIntBetween(minOpCount, maxOpCount); final List<Engine.Operation> ops = new ArrayList<>(); final Term id = newUid("1"); final int startWithSeqNo; if (partialOldPrimary) { startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1); } else { startWithSeqNo = 0; } final String valuePrefix = forReplica ? "r_" : "p_"; final boolean incrementTermWhenIntroducingSeqNo = randomBoolean(); for (int i = 0; i < numOfOps; i++) { final Engine.Operation op; final long version; switch (versionType) { case INTERNAL: version = forReplica ? i : Versions.MATCH_ANY; break; case EXTERNAL: version = i; break; case EXTERNAL_GTE: version = randomBoolean() ? Math.max(i - 1, 0) : i; break; case FORCE: version = randomNonNegativeLong(); break; default: throw new UnsupportedOperationException("unknown version type: " + versionType); } if (randomBoolean()) { op = new Engine.Index(id, testParsedDocument("1", null, testDocumentWithTextField(valuePrefix + i), B_1, null), forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis(), -1, false ); } else { op = new Engine.Delete("test", "1", id, forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis()); } ops.add(op); } return ops; } public static void assertOpsOnReplica( final List<Engine.Operation> ops, final InternalEngine replicaEngine, boolean shuffleOps, final Logger logger) throws IOException { final Engine.Operation lastOp = ops.get(ops.size() - 1); final String lastFieldValue; if (lastOp instanceof Engine.Index) { Engine.Index index = (Engine.Index) lastOp; lastFieldValue = index.docs().get(0).get("value"); } else { // delete lastFieldValue = null; } if (shuffleOps) { int firstOpWithSeqNo = 0; while (firstOpWithSeqNo < ops.size() && ops.get(firstOpWithSeqNo).seqNo() < 0) { firstOpWithSeqNo++; } // shuffle ops but make sure legacy ops are first shuffle(ops.subList(0, firstOpWithSeqNo), random()); shuffle(ops.subList(firstOpWithSeqNo, ops.size()), random()); } boolean firstOp = true; for (Engine.Operation op : ops) { logger.info("performing [{}], v [{}], seq# [{}], term [{}]", op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm()); if (op instanceof Engine.Index) { Engine.IndexResult result = replicaEngine.index((Engine.Index) op); // replicas don't really care to about creation status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return false for the created flag in favor of code simplicity // as deleted or not. This check is just signal regression so a decision can be made if it's // intentional assertThat(result.isCreated(), equalTo(firstOp)); assertThat(result.getVersion(), equalTo(op.version())); assertThat(result.hasFailure(), equalTo(false)); } else { Engine.DeleteResult result = replicaEngine.delete((Engine.Delete) op); // Replicas don't really care to about found status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return true for the found flag in favor of code simplicity // his check is just signal regression so a decision can be made if it's // intentional assertThat(result.isFound(), equalTo(firstOp == false)); assertThat(result.getVersion(), equalTo(op.version())); assertThat(result.hasFailure(), equalTo(false)); } if (randomBoolean()) { replicaEngine.refresh("test"); } if (randomBoolean()) { replicaEngine.flush(); replicaEngine.refresh("test"); } firstOp = false; } assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new TermQuery(new Term("value", lastFieldValue)), collector); assertThat(collector.getTotalHits(), equalTo(1)); } } } ======= protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, VersionType.EXTERNAL, Engine.Operation.Origin.REPLICA, startTime); } >>>>>>> protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, VersionType.EXTERNAL, Engine.Operation.Origin.REPLICA, startTime); } protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { assertVisibleCount(engine, numDocs, true); } protected static void assertVisibleCount(InternalEngine engine, int numDocs, boolean refresh) throws IOException { if (refresh) { engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new MatchAllDocsQuery(), collector); assertThat(collector.getTotalHits(), equalTo(numDocs)); } } public static List<Engine.Operation> generateSingleDocHistory( final boolean forReplica, final VersionType versionType, final boolean partialOldPrimary, final long primaryTerm, final int minOpCount, final int maxOpCount) { final int numOfOps = randomIntBetween(minOpCount, maxOpCount); final List<Engine.Operation> ops = new ArrayList<>(); final Term id = newUid("1"); final int startWithSeqNo; if (partialOldPrimary) { startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1); } else { startWithSeqNo = 0; } final String valuePrefix = forReplica ? "r_" : "p_"; final boolean incrementTermWhenIntroducingSeqNo = randomBoolean(); for (int i = 0; i < numOfOps; i++) { final Engine.Operation op; final long version; switch (versionType) { case INTERNAL: version = forReplica ? i : Versions.MATCH_ANY; break; case EXTERNAL: version = i; break; case EXTERNAL_GTE: version = randomBoolean() ? Math.max(i - 1, 0) : i; break; case FORCE: version = randomNonNegativeLong(); break; default: throw new UnsupportedOperationException("unknown version type: " + versionType); } if (randomBoolean()) { op = new Engine.Index(id, testParsedDocument("1", null, testDocumentWithTextField(valuePrefix + i), B_1, null), forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis(), -1, false ); } else { op = new Engine.Delete("test", "1", id, forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis()); } ops.add(op); } return ops; } public static void assertOpsOnReplica( final List<Engine.Operation> ops, final InternalEngine replicaEngine, boolean shuffleOps, final Logger logger) throws IOException { final Engine.Operation lastOp = ops.get(ops.size() - 1); final String lastFieldValue; if (lastOp instanceof Engine.Index) { Engine.Index index = (Engine.Index) lastOp; lastFieldValue = index.docs().get(0).get("value"); } else { // delete lastFieldValue = null; } if (shuffleOps) { int firstOpWithSeqNo = 0; while (firstOpWithSeqNo < ops.size() && ops.get(firstOpWithSeqNo).seqNo() < 0) { firstOpWithSeqNo++; } // shuffle ops but make sure legacy ops are first shuffle(ops.subList(0, firstOpWithSeqNo), random()); shuffle(ops.subList(firstOpWithSeqNo, ops.size()), random()); } boolean firstOp = true; for (Engine.Operation op : ops) { logger.info("performing [{}], v [{}], seq# [{}], term [{}]", op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm()); if (op instanceof Engine.Index) { Engine.IndexResult result = replicaEngine.index((Engine.Index) op); // replicas don't really care to about creation status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return false for the created flag in favor of code simplicity // as deleted or not. This check is just signal regression so a decision can be made if it's // intentional assertThat(result.isCreated(), equalTo(firstOp)); assertThat(result.getVersion(), equalTo(op.version())); assertThat(result.hasFailure(), equalTo(false)); } else { Engine.DeleteResult result = replicaEngine.delete((Engine.Delete) op); // Replicas don't really care to about found status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return true for the found flag in favor of code simplicity // his check is just signal regression so a decision can be made if it's // intentional assertThat(result.isFound(), equalTo(firstOp == false)); assertThat(result.getVersion(), equalTo(op.version())); assertThat(result.hasFailure(), equalTo(false)); } if (randomBoolean()) { replicaEngine.refresh("test"); } if (randomBoolean()) { replicaEngine.flush(); replicaEngine.refresh("test"); } firstOp = false; } assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new TermQuery(new Term("value", lastFieldValue)), collector); assertThat(collector.getTotalHits(), equalTo(1)); } } }
<<<<<<< import java.util.function.Consumer; ======= import java.util.function.LongSupplier; >>>>>>> import java.util.function.Consumer; import java.util.function.LongSupplier; <<<<<<< private final AsyncGlobalCheckpointTask globalCheckpointTask; ======= private final ScriptService scriptService; private final IndicesQueriesRegistry queryRegistry; private final ClusterService clusterService; private final Client client; >>>>>>> private final AsyncGlobalCheckpointTask globalCheckpointTask; private final ScriptService scriptService; private final IndicesQueriesRegistry queryRegistry; private final ClusterService clusterService; private final Client client;
<<<<<<< ======= import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.ParseField; >>>>>>> import org.elasticsearch.common.ParseField; <<<<<<< ======= import org.elasticsearch.index.mapper.MappedFieldType; >>>>>>>
<<<<<<< // today, we clear the metadata on when there is no master, so it will go through the auto create logic and // add it... (if autoCreate is set to true) checkBulkAction(autoCreateIndex, bulkRequestBuilder); ======= bulkRequestBuilder.setTimeout(timeout); checkBulkAction(false, timeout, bulkRequestBuilder); >>>>>>> checkBulkAction(false, bulkRequestBuilder); <<<<<<< if (indexShouldBeAutoCreated) { // timeout is 200 ======= // If the index exists the bulk doesn't retry with a global block, if an index doesn't exist bulk api delegates // to the create index api which does retry / wait on a global block. if (!autoCreateIndex) { assertThat(System.currentTimeMillis() - now, lessThan(timeout.millis() / 2)); } else { >>>>>>> if (indexShouldBeAutoCreated) { // timeout is 200
<<<<<<< for (IndexingOperationListener listener : listeners) { try { listener.postIndex(index, ex); } catch (Throwable t) { logger.warn("postIndex listener [{}] failed", t, listener); } } ======= totalStats.indexFailed.inc(); typeStats(index.type()).indexFailed.inc(); >>>>>>> totalStats.indexFailed.inc(); typeStats(index.type()).indexFailed.inc(); for (IndexingOperationListener listener : listeners) { try { listener.postIndex(index, ex); } catch (Throwable t) { logger.warn("postIndex listener [{}] failed", t, listener); } }
<<<<<<< null, null, type -> typeParser, olderVersion, null, null, null); ======= null, mapperService, type -> typeParser, olderVersion, null, null); >>>>>>> null, mapperService, type -> typeParser, olderVersion, null, null, null); <<<<<<< null, null, type -> typeParser, version, null, null, null); ======= null, mapperService, type -> typeParser, version, null, null); >>>>>>> null, mapperService, type -> typeParser, version, null, null, null); <<<<<<< FieldMapper.Builder<?> builder = new KeywordFieldMapper.Builder("foo"); Mapper.TypeParser.ParserContext parserContext = new Mapper.TypeParser.ParserContext(null, null, null, null, null, null, null); ======= >>>>>>>
<<<<<<< histogram("histo").field("value").interval(2l) .subAggregation(randomCompression(percentileRanks("percentile_ranks").values(99))) ======= histogram("histo").field("value").interval(2L) .subAggregation(randomCompression(percentileRanks("percentile_ranks").percentiles(99))) >>>>>>> histogram("histo").field("value").interval(2L) .subAggregation(randomCompression(percentileRanks("percentile_ranks").values(99)))
<<<<<<< ======= import com.forgeessentials.core.moduleLauncher.FEModule.Preconditions; >>>>>>> import com.forgeessentials.core.moduleLauncher.FEModule.Preconditions; <<<<<<< import com.sk89q.worldedit.forge.ForgeWorldEdit; ======= >>>>>>> import com.sk89q.worldedit.forge.ForgeWorldEdit;
<<<<<<< .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0) .subAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location").addRange("0-100", 0.0, 100.0))) ======= .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) .subAggregation(geoDistance("geo_dist").field("location").point("52.3760, 4.894").addRange("0-100", 0.0, 100.0))) >>>>>>> .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0) .subAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location").addRange("0-100", 0.0, 100.0)))
<<<<<<< versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation)); index.setTranslogLocation(translogLocation); indexingService.postIndexUnderLock(index); return created; } finally { if (index.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { seqNoService.markSeqNoAsCompleted(index.seqNo()); } } ======= versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation)); index.setTranslogLocation(translogLocation); return created; >>>>>>> versionMap.putUnderLock(index.uid().bytes(), new VersionValue(updatedVersion, translogLocation)); index.setTranslogLocation(translogLocation); return created; } finally { if (index.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { seqNoService.markSeqNoAsCompleted(index.seqNo()); } } <<<<<<< delete.updateVersion(updatedVersion, found); Translog.Location translogLocation = translog.add(new Translog.Delete(delete)); versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation)); delete.setTranslogLocation(translogLocation); indexingService.postDeleteUnderLock(delete); } finally { if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { seqNoService.markSeqNoAsCompleted(delete.seqNo()); } } ======= delete.updateVersion(updatedVersion, found); Translog.Location translogLocation = translog.add(new Translog.Delete(delete)); versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation)); delete.setTranslogLocation(translogLocation); >>>>>>> delete.updateVersion(updatedVersion, found); Translog.Location translogLocation = translog.add(new Translog.Delete(delete)); versionMap.putUnderLock(delete.uid().bytes(), new DeleteVersionValue(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), translogLocation)); delete.setTranslogLocation(translogLocation); } finally { if (delete.seqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { seqNoService.markSeqNoAsCompleted(delete.seqNo()); } }
<<<<<<< import com.amazonaws.services.s3.AmazonS3; ======= import com.amazonaws.ClientConfiguration; >>>>>>> import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.ClientConfiguration; <<<<<<< // If the user defined a path style access setting, we rely on it otherwise we use the default // value set by the SDK Boolean pathStyleAccess = null; if (Repository.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.settings()) || Repositories.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.globalSettings())) { pathStyleAccess = getValue(repositorySettings, Repository.PATH_STYLE_ACCESS_SETTING, Repositories.PATH_STYLE_ACCESS_SETTING); } logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], buffer_size [{}], max_retries [{}], canned_acl [{}], storage_class [{}], path_style_access [{}]", bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, cannedACL, storageClass, pathStyleAccess); ======= logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], " + "buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}]", bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, useThrottleRetries, cannedACL, storageClass); >>>>>>> // If the user defined a path style access setting, we rely on it otherwise we use the default // value set by the SDK Boolean pathStyleAccess = null; if (Repository.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.settings()) || Repositories.PATH_STYLE_ACCESS_SETTING.exists(repositorySettings.globalSettings())) { pathStyleAccess = getValue(repositorySettings, Repository.PATH_STYLE_ACCESS_SETTING, Repositories.PATH_STYLE_ACCESS_SETTING); } logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], " + "buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}], path_style_access [{}]", bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, useThrottleRetries, cannedACL, storageClass, pathStyleAccess); <<<<<<< blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, pathStyleAccess), ======= blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries), >>>>>>> blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries, pathStyleAccess),
<<<<<<< import org.elasticsearch.ElasticsearchException; ======= >>>>>>> <<<<<<< import org.elasticsearch.alerts.Alert; import org.elasticsearch.alerts.AlertManager; import org.elasticsearch.alerts.AlertsStore; import org.elasticsearch.alerts.BasicAlertingTest; import org.elasticsearch.alerts.client.AlertsClient; import org.elasticsearch.alerts.client.AlertsClientInterface; import org.elasticsearch.alerts.plugin.AlertsPlugin; import org.elasticsearch.alerts.transport.actions.create.CreateAlertRequest; import org.elasticsearch.alerts.transport.actions.create.CreateAlertResponse; import org.elasticsearch.alerts.transport.actions.delete.DeleteAlertRequest; import org.elasticsearch.alerts.transport.actions.delete.DeleteAlertResponse; import org.elasticsearch.alerts.transport.actions.get.GetAlertRequest; import org.elasticsearch.alerts.transport.actions.get.GetAlertResponse; import org.elasticsearch.alerts.transport.actions.update.UpdateAlertRequest; import org.elasticsearch.alerts.transport.actions.update.UpdateAlertResponse; ======= import org.elasticsearch.alerts.AlertManager; import org.elasticsearch.alerts.AlertsStore; import org.elasticsearch.alerts.BasicAlertingTest; import org.elasticsearch.alerts.client.AlertsClient; import org.elasticsearch.alerts.client.AlertsClientInterface; import org.elasticsearch.alerts.plugin.AlertsPlugin; import org.elasticsearch.alerts.transport.actions.create.CreateAlertRequest; import org.elasticsearch.alerts.transport.actions.create.CreateAlertResponse; >>>>>>> import org.elasticsearch.alerts.Alert; import org.elasticsearch.alerts.AlertManager; import org.elasticsearch.alerts.AlertsStore; import org.elasticsearch.alerts.client.AlertsClient; import org.elasticsearch.alerts.client.AlertsClientInterface; import org.elasticsearch.alerts.plugin.AlertsPlugin; import org.elasticsearch.alerts.transport.actions.create.CreateAlertRequest; import org.elasticsearch.alerts.transport.actions.create.CreateAlertResponse; import org.elasticsearch.alerts.transport.actions.delete.DeleteAlertRequest; import org.elasticsearch.alerts.transport.actions.delete.DeleteAlertResponse; import org.elasticsearch.alerts.transport.actions.get.GetAlertRequest; import org.elasticsearch.alerts.transport.actions.get.GetAlertResponse; import org.elasticsearch.alerts.transport.actions.update.UpdateAlertRequest; import org.elasticsearch.alerts.transport.actions.update.UpdateAlertResponse; <<<<<<< import java.io.IOException; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.core.Is.is; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; ======= import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; >>>>>>> import java.io.IOException; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.core.Is.is; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; <<<<<<< DateTime fireTime = new DateTime(DateTimeZone.UTC); DateTime scheduledFireTime = new DateTime(DateTimeZone.UTC); ======= DateTime fireTime = new DateTime(DateTimeZone.UTC); DateTime scheduledFireTime = new DateTime(DateTimeZone.UTC); >>>>>>> DateTime fireTime = new DateTime(DateTimeZone.UTC); DateTime scheduledFireTime = new DateTime(DateTimeZone.UTC); <<<<<<< @Test public void testAlertActions() throws Exception { createIndex("my-index"); createIndex(AlertsStore.ALERT_INDEX); createIndex(AlertActionManager.ALERT_HISTORY_INDEX); ensureGreen("my-index", AlertsStore.ALERT_INDEX, AlertActionManager.ALERT_HISTORY_INDEX); client().preparePutIndexedScript() .setScriptLang("mustache") .setId("query") .setSource(jsonBuilder().startObject().startObject("template").startObject("match_all").endObject().endObject().endObject()) .get(); final AlertManager alertManager = internalCluster().getInstance(AlertManager.class, internalCluster().getMasterName()); assertBusy(new Runnable() { @Override public void run() { assertThat(alertManager.isStarted(), is(true)); } }); final AtomicBoolean alertActionInvoked = new AtomicBoolean(false); final AlertAction alertAction = new AlertAction() { @Override public String getActionName() { return "test"; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.endObject(); return builder; } @Override public void writeTo(StreamOutput out) throws IOException { } @Override public void readFrom(StreamInput in) throws IOException { } @Override public boolean doAction(Alert alert, AlertActionEntry actionEntry) { logger.info("Alert {} invoked: {}", alert.alertName(), actionEntry); alertActionInvoked.set(true); return true; } }; AlertActionRegistry alertActionRegistry = internalCluster().getInstance(AlertActionRegistry.class, internalCluster().getMasterName()); alertActionRegistry.registerAction("test", new AlertActionFactory() { @Override public AlertAction createAction(XContentParser parser) throws IOException { parser.nextToken(); return alertAction; } @Override public AlertAction readFrom(StreamInput in) throws IOException { return alertAction; } }); AlertTrigger alertTrigger = new AlertTrigger(new ScriptedAlertTrigger("return true", ScriptService.ScriptType.INLINE, "groovy")); Alert alert = new Alert( "my-first-alert", client().prepareSearch("my-index").setQuery(QueryBuilders.matchAllQuery()).request(), alertTrigger, Arrays.asList(alertAction), "0/5 * * * * ? *", null, 1, true ); CreateAlertRequest alertRequest = new CreateAlertRequest(alert); AlertsClientInterface alertsClient = internalCluster().getInstance(AlertsClient.class, internalCluster().getMasterName()); CreateAlertResponse alertsResponse = alertsClient.createAlert(alertRequest).actionGet(); assertTrue(alertsResponse.success()); GetAlertRequest getAlertRequest = new GetAlertRequest(alert.alertName()); GetAlertResponse getAlertResponse = alertsClient.getAlert(getAlertRequest).actionGet(); assertTrue(getAlertResponse.found()); assertEquals(alert.schedule(), getAlertResponse.alert().schedule()); String schedule = "0/10 * * * * ? *"; alert.schedule(schedule); UpdateAlertRequest updateAlertRequest = new UpdateAlertRequest(alert); UpdateAlertResponse updateAlertResponse = alertsClient.updateAlert(updateAlertRequest).actionGet(); assertTrue(updateAlertResponse.success()); DeleteAlertRequest deleteAlertRequest = new DeleteAlertRequest(alert.alertName()); DeleteAlertResponse deleteAlertResponse = alertsClient.deleteAlert(deleteAlertRequest).actionGet(); assertTrue(deleteAlertResponse.success()); getAlertResponse = alertsClient.getAlert(getAlertRequest).actionGet(); assertFalse(getAlertResponse.found()); updateAlertResponse = alertsClient.updateAlert(updateAlertRequest).actionGet(); assertFalse(updateAlertResponse.success()); } ======= @Test public void testCreateAlert() throws Exception { createIndex("my-index"); createIndex(AlertsStore.ALERT_INDEX); createIndex(AlertActionManager.ALERT_HISTORY_INDEX); ensureGreen("my-index", AlertsStore.ALERT_INDEX, AlertActionManager.ALERT_HISTORY_INDEX); client().preparePutIndexedScript() .setScriptLang("mustache") .setId("query") .setSource(jsonBuilder().startObject().startObject("template").startObject("match_all").endObject().endObject().endObject()) .get(); CreateAlertRequest alertRequest = new CreateAlertRequest("myAlert"); alertRequest.queryName("test-query"); alertRequest.enabled(true); alertRequest.schedule("0/5 * * * * ? *"); alertRequest.trigger(new AlertTrigger(AlertTrigger.SimpleTrigger.GREATER_THAN, AlertTrigger.TriggerType.NUMBER_OF_EVENTS, 1)); alertRequest.timePeriod(new TimeValue(300, TimeUnit.SECONDS)); alertRequest.actions(new ArrayList<String>()); alertRequest.lastRan(new DateTime()); alertRequest.lastActionFire(new DateTime()); alertRequest.running(new DateTime()); AlertsClientInterface alertsClient = internalCluster().getInstance(AlertsClient.class, internalCluster().getMasterName()); CreateAlertResponse alertsResponse = alertsClient.createAlert(alertRequest).actionGet(); assertTrue(alertsResponse.success()); } >>>>>>> @Test public void testAlertActions() throws Exception { createIndex("my-index"); createIndex(AlertsStore.ALERT_INDEX); createIndex(AlertActionManager.ALERT_HISTORY_INDEX); ensureGreen("my-index", AlertsStore.ALERT_INDEX, AlertActionManager.ALERT_HISTORY_INDEX); client().preparePutIndexedScript() .setScriptLang("mustache") .setId("query") .setSource(jsonBuilder().startObject().startObject("template").startObject("match_all").endObject().endObject().endObject()) .get(); final AlertManager alertManager = internalCluster().getInstance(AlertManager.class, internalCluster().getMasterName()); assertBusy(new Runnable() { @Override public void run() { assertThat(alertManager.isStarted(), is(true)); } }); final AtomicBoolean alertActionInvoked = new AtomicBoolean(false); final AlertAction alertAction = new AlertAction() { @Override public String getActionName() { return "test"; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.endObject(); return builder; } @Override public void writeTo(StreamOutput out) throws IOException { } @Override public void readFrom(StreamInput in) throws IOException { } @Override public boolean doAction(Alert alert, AlertActionEntry actionEntry) { logger.info("Alert {} invoked: {}", alert.alertName(), actionEntry); alertActionInvoked.set(true); return true; } }; AlertActionRegistry alertActionRegistry = internalCluster().getInstance(AlertActionRegistry.class, internalCluster().getMasterName()); alertActionRegistry.registerAction("test", new AlertActionFactory() { @Override public AlertAction createAction(XContentParser parser) throws IOException { parser.nextToken(); return alertAction; } @Override public AlertAction readFrom(StreamInput in) throws IOException { return alertAction; } }); AlertTrigger alertTrigger = new AlertTrigger(new ScriptedAlertTrigger("return true", ScriptService.ScriptType.INLINE, "groovy")); Alert alert = new Alert( "my-first-alert", client().prepareSearch("my-index").setQuery(QueryBuilders.matchAllQuery()).request(), alertTrigger, Arrays.asList(alertAction), "0/5 * * * * ? *", null, 1, true ); AlertsClientInterface alertsClient = internalCluster().getInstance(AlertsClient.class, internalCluster().getMasterName()); CreateAlertRequest alertRequest = new CreateAlertRequest(alert); CreateAlertResponse alertsResponse = alertsClient.createAlert(alertRequest).actionGet(); assertTrue(alertsResponse.success()); GetAlertRequest getAlertRequest = new GetAlertRequest(alert.alertName()); GetAlertResponse getAlertResponse = alertsClient.getAlert(getAlertRequest).actionGet(); assertTrue(getAlertResponse.found()); assertEquals(alert.schedule(), getAlertResponse.alert().schedule()); String schedule = "0/10 * * * * ? *"; alert.schedule(schedule); UpdateAlertRequest updateAlertRequest = new UpdateAlertRequest(alert); UpdateAlertResponse updateAlertResponse = alertsClient.updateAlert(updateAlertRequest).actionGet(); assertTrue(updateAlertResponse.success()); DeleteAlertRequest deleteAlertRequest = new DeleteAlertRequest(alert.alertName()); DeleteAlertResponse deleteAlertResponse = alertsClient.deleteAlert(deleteAlertRequest).actionGet(); assertTrue(deleteAlertResponse.success()); getAlertResponse = alertsClient.getAlert(getAlertRequest).actionGet(); assertFalse(getAlertResponse.found()); updateAlertResponse = alertsClient.updateAlert(updateAlertRequest).actionGet(); assertFalse(updateAlertResponse.success()); }
<<<<<<< // For the case where a cast is not required but a // constant is set, an EConstant replaces this node // with the constant copied from this node. Note that // for constants output data does not need to be copied // from this node because the output data for the EConstant // will already be the same. EConstant econstant = new EConstant(line, offset, location, constant); ======= final EConstant econstant = new EConstant(location, constant); >>>>>>> // For the case where a cast is not required but a // constant is set, an EConstant replaces this node // with the constant copied from this node. Note that // for constants output data does not need to be copied // from this node because the output data for the EConstant // will already be the same. EConstant econstant = new EConstant(location, constant); <<<<<<< // For the case where a cast is required and a constant is not set. // Modify the tree to add an ECast between this node and its parent. // The output data from this node is copied to the ECast for // further reads done by the parent. ECast ecast = new ECast(line, offset, location, this, cast); ======= final ECast ecast = new ECast(location, this, cast); >>>>>>> // For the case where a cast is required and a constant is not set. // Modify the tree to add an ECast between this node and its parent. // The output data from this node is copied to the ECast for // further reads done by the parent. ECast ecast = new ECast(location, this, cast); <<<<<<< EConstant econstant = new EConstant(line, offset, location, constant); ======= final EConstant econstant = new EConstant(location, constant); >>>>>>> EConstant econstant = new EConstant(location, constant); <<<<<<< // For the case where a cast is required, a constant is set, // the constant cannot be immediately cast to the expected type, // and this node is already an EConstant. Modify the tree to add // an ECast between this node and its parent. Note that // for constants output data does not need to be copied // from this node because the output data for the EConstant // will already be the same. ECast ecast = new ECast(line, offset, location, this, cast); ======= final ECast ecast = new ECast(location, this, cast); >>>>>>> // For the case where a cast is required, a constant is set, // the constant cannot be immediately cast to the expected type, // and this node is already an EConstant. Modify the tree to add // an ECast between this node and its parent. Note that // for constants output data does not need to be copied // from this node because the output data for the EConstant // will already be the same. ECast ecast = new ECast(location, this, cast); <<<<<<< // For the case where a cast is required, a constant is set, // the constant cannot be immediately cast to the expected type, // and this node is not an EConstant. Replace this node with // an Econstant node copying the constant from this node. // Modify the tree to add an ECast between the EConstant node // and its parent. Note that for constants output data does not // need to be copied from this node because the output data for // the EConstant will already be the same. EConstant econstant = new EConstant(line, offset, location, constant); ======= final EConstant econstant = new EConstant(location, constant); >>>>>>> // For the case where a cast is required, a constant is set, // the constant cannot be immediately cast to the expected type, // and this node is not an EConstant. Replace this node with // an Econstant node copying the constant from this node. // Modify the tree to add an ECast between the EConstant node // and its parent. Note that for constants output data does not // need to be copied from this node because the output data for // the EConstant will already be the same. EConstant econstant = new EConstant(location, constant); <<<<<<< ECast ecast = new ECast(line, offset, location, econstant, cast); ======= final ECast ecast = new ECast(location, econstant, cast); >>>>>>> ECast ecast = new ECast(location, econstant, cast);
<<<<<<< protected boolean assertPrimaryCanOptimizeAddDocument(final Index index) { assert (index.version() == Versions.MATCH_ANY && index.versionType() == VersionType.INTERNAL) : "version: " + index.version() + " type: " + index.versionType(); return true; } private boolean assertVersionType(final Engine.Operation operation) { if (operation.origin() == Operation.Origin.REPLICA || operation.origin() == Operation.Origin.PEER_RECOVERY || operation.origin() == Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { // ensure that replica operation has expected version type for replication // ensure that versionTypeForReplicationAndRecovery is idempotent assert operation.versionType() == operation.versionType().versionTypeForReplicationAndRecovery() : "unexpected version type in request from [" + operation.origin().name() + "] " + "found [" + operation.versionType().name() + "] " + "expected [" + operation.versionType().versionTypeForReplicationAndRecovery().name() + "]"; } return true; } ======= >>>>>>> protected boolean assertPrimaryCanOptimizeAddDocument(final Index index) { assert (index.version() == Versions.MATCH_ANY && index.versionType() == VersionType.INTERNAL) : "version: " + index.version() + " type: " + index.versionType(); return true; } <<<<<<< protected DeletionStrategy deletionStrategyForOperation(final Delete delete) throws IOException { if (delete.origin() == Operation.Origin.PRIMARY) { return planDeletionAsPrimary(delete); } else { // non-primary mode (i.e., replica or recovery) return planDeletionAsNonPrimary(delete); } } protected final DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assertNonPrimaryOrigin(delete); // drop out of order operations assert delete.versionType().versionTypeForReplicationAndRecovery() == delete.versionType() : "resolving out of order delivery based on versioning but version type isn't fit for it. got [" + delete.versionType() + "]"; ======= private DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assert delete.origin() != Operation.Origin.PRIMARY : "planing as primary but got " + delete.origin(); >>>>>>> protected DeletionStrategy deletionStrategyForOperation(final Delete delete) throws IOException { if (delete.origin() == Operation.Origin.PRIMARY) { return planDeletionAsPrimary(delete); } else { // non-primary mode (i.e., replica or recovery) return planDeletionAsNonPrimary(delete); } } protected final DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assertNonPrimaryOrigin(delete);
<<<<<<< public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget, Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher, TimeValue timeout, FetchPhase fetchPhase) { super(parseFieldMatcher, request); ======= public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget, Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher, TimeValue timeout ) { super(parseFieldMatcher); >>>>>>> public DefaultSearchContext(long id, ShardSearchRequest request, SearchShardTarget shardTarget, Engine.Searcher engineSearcher, IndexService indexService, IndexShard indexShard, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher, TimeValue timeout, FetchPhase fetchPhase) { super(parseFieldMatcher);
<<<<<<< ======= import java.util.Arrays; import java.util.List; import org.elasticsearch.client.support.Headers; >>>>>>> import java.util.Arrays; import java.util.List;
<<<<<<< public static class FieldValueScriptPlugin extends Plugin { public void onModule(ScriptModule module) { module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true)); ======= public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin { @Override public String name() { return FieldValueScriptEngine.NAME; } @Override public String description() { return "Mock script engine for " + ValueCountIT.class; } @Override public ScriptEngineService getScriptEngineService(Settings settings) { return new FieldValueScriptEngine(); >>>>>>> public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin { @Override public ScriptEngineService getScriptEngineService(Settings settings) { return new FieldValueScriptEngine();
<<<<<<< import com.forgeessentials.commons.selections.Point; ======= import java.util.HashMap; import java.util.List; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraftforge.permissions.PermissionsManager.RegisteredPermValue; >>>>>>> import com.forgeessentials.commons.selections.Point; import java.util.HashMap; import java.util.List; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraftforge.permissions.PermissionsManager.RegisteredPermValue; <<<<<<< import com.forgeessentials.commons.selections.WarpPoint; import com.forgeessentials.util.teleport.TeleportCenter; import cpw.mods.fml.common.FMLCommonHandler; import net.minecraft.command.ICommandSender; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraftforge.permissions.PermissionsManager.RegisteredPermValue; ======= import com.forgeessentials.util.selections.Point; import com.forgeessentials.util.selections.WarpPoint; >>>>>>> import com.forgeessentials.commons.selections.WarpPoint; import com.forgeessentials.util.teleport.TeleportCenter;
<<<<<<< for (User user : Arrays.asList(XPackUser.INSTANCE, superuser)) { List<Tuple<String, TransportRequest>> requests = new ArrayList<>(); requests.add(new Tuple<>(DeleteAction.NAME, new DeleteRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, DeleteRequest::new))); requests.add(new Tuple<>(UpdateAction.NAME, new UpdateRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndexAction.NAME, new IndexRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, IndexRequest::new))); requests.add(new Tuple<>(SearchAction.NAME, new SearchRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(GetAction.NAME, new GetRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndicesAliasesAction.NAME, new IndicesAliasesRequest() .addAliasAction(AliasActions.add().alias("security_alias").index(SecurityLifecycleService.SECURITY_INDEX_NAME)))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "foo", "bar"))); for (Tuple<String, TransportRequest> requestTuple : requests) { String action = requestTuple.v1(); TransportRequest request = requestTuple.v2(); authorize(createAuthentication(user), action, request); verify(auditTrail).accessGranted(user, action, request, null); } ======= List<Tuple<String, TransportRequest>> requests = new ArrayList<>(); requests.add(new Tuple<>(DeleteAction.NAME, new DeleteRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, DeleteRequest::new))); requests.add(new Tuple<>(UpdateAction.NAME, new UpdateRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndexAction.NAME, new IndexRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, IndexRequest::new))); requests.add(new Tuple<>(SearchAction.NAME, new SearchRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(GetAction.NAME, new GetRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndicesAliasesAction.NAME, new IndicesAliasesRequest() .addAliasAction(AliasActions.add().alias("security_alias").index(SecurityLifecycleService.SECURITY_INDEX_NAME)))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new 
ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "foo", "bar"))); for (Tuple<String, TransportRequest> requestTuple : requests) { String action = requestTuple.v1(); TransportRequest request = requestTuple.v2(); authorize(createAuthentication(superuser), action, request); verify(auditTrail).accessGranted(superuser, action, request); >>>>>>> List<Tuple<String, TransportRequest>> requests = new ArrayList<>(); requests.add(new Tuple<>(DeleteAction.NAME, new DeleteRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, DeleteRequest::new))); requests.add(new Tuple<>(UpdateAction.NAME, new UpdateRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndexAction.NAME, new IndexRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(BulkAction.NAME + "[s]", createBulkShardRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, IndexRequest::new))); requests.add(new Tuple<>(SearchAction.NAME, new SearchRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(GetAction.NAME, new GetRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(TermVectorsAction.NAME, new TermVectorsRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "type", "id"))); requests.add(new Tuple<>(IndicesAliasesAction.NAME, new IndicesAliasesRequest() .addAliasAction(AliasActions.add().alias("security_alias").index(SecurityLifecycleService.SECURITY_INDEX_NAME)))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME))); requests.add(new Tuple<>(ClusterHealthAction.NAME, new ClusterHealthRequest(SecurityLifecycleService.SECURITY_INDEX_NAME, "foo", "bar"))); for (Tuple<String, TransportRequest> requestTuple : requests) { String action = requestTuple.v1(); TransportRequest request = requestTuple.v2(); authorize(createAuthentication(superuser), action, request); verify(auditTrail).accessGranted(superuser, action, request, null); <<<<<<< authorize(createAuthentication(XPackUser.INSTANCE), action, request); verify(auditTrail).accessGranted(XPackUser.INSTANCE, action, request, null); assertThat(request.indices(), arrayContaining(".security")); request = new SearchRequest("_all"); ======= >>>>>>>
<<<<<<< ======= import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.util.iterable.Iterables; >>>>>>> import org.elasticsearch.common.ParsingException; <<<<<<< public IdsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { ======= public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { >>>>>>> public IdsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { <<<<<<< String id = parser.textOrNull(); if (id == null) { throw new QueryParsingException(parseContext, "No value specified for term filter"); ======= BytesRef value = parser.utf8BytesOrNull(); if (value == null) { throw new ParsingException(parseContext, "No value specified for term filter"); >>>>>>> String id = parser.textOrNull(); if (id == null) { throw new ParsingException(parseContext, "No value specified for term filter");
<<<<<<< ident = byUsername.get(player.getName().toLowerCase()); if (ident == null) ======= ident = byUsername.get(player.getCommandSenderName().toLowerCase()); if (ident != null) { ident.uuid = player.getPersistentID(); byUuid.put(ident.uuid, ident); } else >>>>>>> ident = byUsername.get(player.getName().toLowerCase()); if (ident != null) { ident.uuid = player.getPersistentID(); byUuid.put(ident.uuid, ident); } else <<<<<<< return MinecraftServer.getServer().getConfigurationManager().getPlayerByUsername(username); ======= MinecraftServer mc = MinecraftServer.getServer(); if (mc == null) return null; ServerConfigurationManager configurationManager = mc.getConfigurationManager(); return configurationManager == null ? null : configurationManager.func_152612_a(username); >>>>>>> MinecraftServer mc = MinecraftServer.getServer(); if (mc == null) return null; ServerConfigurationManager configurationManager = mc.getConfigurationManager(); return configurationManager == null ? null : configurationManager.getPlayerByUsername(username);
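Editor's note on the resolution above: it combines the null guards introduced on one side (the server and its configuration manager can be absent during startup or shutdown) with the method rename adopted on the other (func_152612_a becoming getPlayerByUsername). A minimal, self-contained sketch of that null-guarded lookup chain; the classes below are simplified stand-ins for the Minecraft server types, not the real API:

// Stand-in types; only the null-guard pattern mirrors the resolution above.
final class PlayerLookupSketch {

    static final class ConfigurationManager {
        // Pretend lookup: returns the name itself when "found".
        String getPlayerByUsername(String username) {
            return username;
        }
    }

    static final class Server {
        ConfigurationManager getConfigurationManager() {
            return new ConfigurationManager();
        }
    }

    // Each hop may be null, so bail out early instead of risking an NPE.
    static String lookup(Server server, String username) {
        if (server == null) {
            return null;
        }
        ConfigurationManager manager = server.getConfigurationManager();
        return manager == null ? null : manager.getPlayerByUsername(username);
    }

    public static void main(String[] args) {
        System.out.println(lookup(null, "alice"));         // null, no exception
        System.out.println(lookup(new Server(), "alice")); // alice
    }
}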
<<<<<<< ======= import org.apache.lucene.index.Term; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; >>>>>>> import org.elasticsearch.common.ParseField;
<<<<<<< import com.carrotsearch.hppc.IntObjectHashMap; ======= >>>>>>> <<<<<<< import org.elasticsearch.search.fetch.FetchPhase; ======= import org.elasticsearch.search.aggregations.support.AggregationContext; >>>>>>> import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.fetch.FetchPhase; <<<<<<< private final ParseFieldMatcher parseFieldMatcher; private final FetchPhase fetchPhase; ======= private final PercolatorIndex single; private final PercolatorIndex multi; >>>>>>> private final PercolatorIndex single; private final PercolatorIndex multi; private final ParseFieldMatcher parseFieldMatcher; private final FetchPhase fetchPhase; <<<<<<< PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, HighlightPhase highlightPhase, ClusterService clusterService, AggregationPhase aggregationPhase, ScriptService scriptService, MappingUpdatedAction mappingUpdatedAction, FetchPhase fetchPhase) { ======= PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, HighlightPhase highlightPhase, ClusterService clusterService, AggregationPhase aggregationPhase, ScriptService scriptService, PercolateDocumentParser percolateDocumentParser) { >>>>>>> PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, HighlightPhase highlightPhase, ClusterService clusterService, AggregationPhase aggregationPhase, ScriptService scriptService, PercolateDocumentParser percolateDocumentParser, FetchPhase fetchPhase) { <<<<<<< this.fetchPhase = fetchPhase; ======= this.percolateDocumentParser = percolateDocumentParser; >>>>>>> this.percolateDocumentParser = percolateDocumentParser; this.fetchPhase = fetchPhase;
<<<<<<< }, new TranslogHandler(shardId.index().getName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); ======= }, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); >>>>>>> }, new TranslogHandler(shardId.getIndexName(), logger), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); <<<<<<< assertThat(create.version(), equalTo(1l)); create = new Engine.Index(newUid("1"), doc, create.seqNo(), create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); ======= assertThat(create.version(), equalTo(1L)); create = new Engine.Index(newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); >>>>>>> assertThat(create.version(), equalTo(1L)); create = new Engine.Index(newUid("1"), doc, create.seqNo(), create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0); <<<<<<< delete = new Engine.Delete("test", "1", newUid("1"), SequenceNumbersService.UNASSIGNED_SEQ_NO, 2l, VersionType.INTERNAL, PRIMARY, 0, false); ======= delete = new Engine.Delete("test", "1", newUid("1"), 2L, VersionType.INTERNAL, PRIMARY, 0, false); >>>>>>> delete = new Engine.Delete("test", "1", newUid("1"), SequenceNumbersService.UNASSIGNED_SEQ_NO, 2L, VersionType.INTERNAL, PRIMARY, 0, false); <<<<<<< index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2l, VersionType.INTERNAL, PRIMARY, 0); ======= index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL, PRIMARY, 0); >>>>>>> index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2L, VersionType.INTERNAL, PRIMARY, 0); <<<<<<< delete = new Engine.Delete("test", "1", newUid("1"), SequenceNumbersService.UNASSIGNED_SEQ_NO, 2l, VersionType.INTERNAL, PRIMARY, 0, false); ======= delete = new Engine.Delete("test", "1", newUid("1"), 2L, VersionType.INTERNAL, PRIMARY, 0, false); >>>>>>> delete = new Engine.Delete("test", "1", newUid("1"), SequenceNumbersService.UNASSIGNED_SEQ_NO, 2L, VersionType.INTERNAL, PRIMARY, 0, false); <<<<<<< index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2l, VersionType.INTERNAL, PRIMARY, 0); ======= index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL, PRIMARY, 0); >>>>>>> index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2L, VersionType.INTERNAL, PRIMARY, 0); <<<<<<< index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); ======= index = new Engine.Index(newUid("1"), doc, 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); >>>>>>> index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); <<<<<<< index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2l ======= index = new Engine.Index(newUid("1"), doc, 2L >>>>>>> index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L <<<<<<< index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1l ======= index = new Engine.Index(newUid("1"), doc, 1L >>>>>>> index = new Engine.Index(newUid("1"), doc, index.seqNo(), 1L <<<<<<< delete = new Engine.Delete("test", "1", 
newUid("1"), delete.seqNo(), 3l ======= delete = new Engine.Delete("test", "1", newUid("1"), 3L >>>>>>> delete = new Engine.Delete("test", "1", newUid("1"), delete.seqNo(), 3L <<<<<<< delete = new Engine.Delete("test", "1", newUid("1"), delete.seqNo(), 3l ======= delete = new Engine.Delete("test", "1", newUid("1"), 3L >>>>>>> delete = new Engine.Delete("test", "1", newUid("1"), delete.seqNo(), 3L <<<<<<< index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); ======= index = new Engine.Index(newUid("1"), doc, 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); >>>>>>> index = new Engine.Index(newUid("1"), doc, index.seqNo(), 2L, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0); <<<<<<< // #10312 public void testDeletesAloneCanTriggerRefresh() throws Exception { try (Store store = createStore(); Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), newMergePolicy()), false)) { engine.config().setIndexingBufferSize(new ByteSizeValue(1, ByteSizeUnit.KB)); for (int i = 0; i < 100; i++) { String id = Integer.toString(i); ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, null); engine.index(new Engine.Index(newUid(id), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime())); } // Force merge so we know all merges are done before we start deleting: engine.forceMerge(true, 1, false, false, false); Searcher s = engine.acquireSearcher("test"); final long version1 = ((DirectoryReader) s.reader()).getVersion(); s.close(); for (int i = 0; i < 100; i++) { String id = Integer.toString(i); engine.delete(new Engine.Delete("test", id, newUid(id), SequenceNumbersService.UNASSIGNED_SEQ_NO, 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false)); } // We must assertBusy because refresh due to version map being full is done in background (REFRESH) thread pool: assertBusy(() -> { Searcher s2 = engine.acquireSearcher("test"); long version2 = ((DirectoryReader) s2.reader()).getVersion(); s2.close(); // 100 buffered deletes will easily exceed 25% of our 1 KB indexing buffer so it should have forced a refresh: assertThat(version2, greaterThan(version1)); }); } } ======= >>>>>>>
<<<<<<< public TypeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException { ======= public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { >>>>>>> public TypeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { <<<<<<< String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("value".equals(currentFieldName)) { type = parser.utf8Bytes(); } } else { throw new QueryParsingException(parseContext, "[type] filter doesn't support [" + currentFieldName + "]"); } ======= XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.FIELD_NAME) { throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); } String fieldName = parser.currentName(); if (!fieldName.equals("value")) { throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); } token = parser.nextToken(); if (token != XContentParser.Token.VALUE_STRING) { throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name"); >>>>>>> String queryName = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String currentFieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if ("_name".equals(currentFieldName)) { queryName = parser.text(); } else if ("boost".equals(currentFieldName)) { boost = parser.floatValue(); } else if ("value".equals(currentFieldName)) { type = parser.utf8Bytes(); } } else { throw new ParsingException(parseContext, "[type] filter doesn't support [" + currentFieldName + "]"); }
<<<<<<< import org.elasticsearch.node.Node; ======= import org.elasticsearch.common.transport.TransportAddress; >>>>>>> import org.elasticsearch.node.Node; import org.elasticsearch.common.transport.TransportAddress; <<<<<<< public Map<String, String> buildAttributes() { Map<String, String> attributes = new HashMap<>(Node.NODE_ATTRIBUTES.get(this.settings).getAsMap()); ======= public DiscoveryNode buildLocalNode(TransportAddress publishAddress) { Map<String, String> attributes = new HashMap<>(settings.getByPrefix("node.").getAsMap()); >>>>>>> public DiscoveryNode buildLocalNode(TransportAddress publishAddress) { Map<String, String> attributes = new HashMap<>(Node.NODE_ATTRIBUTES.get(this.settings).getAsMap());
<<<<<<< public static final String NAME = "mlt"; ======= // document inputs private List<String> likeTexts = new ArrayList<>(); private List<String> unlikeTexts = new ArrayList<>(); private List<Item> likeItems = new ArrayList<>(); private List<Item> unlikeItems = new ArrayList<>(); >>>>>>> public static final String NAME = "mlt"; // document inputs private List<String> likeTexts = new ArrayList<>(); private List<String> unlikeTexts = new ArrayList<>(); private List<Item> likeItems = new ArrayList<>(); private List<Item> unlikeItems = new ArrayList<>(); <<<<<<< private float boostTerms = -1; ======= private String[] stopWords = null; >>>>>>> private String[] stopWords = null; <<<<<<< static final MoreLikeThisQueryBuilder PROTOTYPE = new MoreLikeThisQueryBuilder(); ======= private float boost = -1; private String queryName; >>>>>>> static final MoreLikeThisQueryBuilder PROTOTYPE = new MoreLikeThisQueryBuilder(); <<<<<<< ======= /** * Number of terms that must match the generated query expressed in the * common syntax for minimum should match. Defaults to <tt>30%</tt>. * * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String) */ public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } /** * Sets the boost factor to use when boosting terms. Defaults to <tt>1</tt>. */ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { this.boostTerms = boostTerms; return this; } /** * Whether to include the input documents. Defaults to <tt>false</tt> */ public MoreLikeThisQueryBuilder include(boolean include) { this.include = include; return this; } >>>>>>> /** * Number of terms that must match the generated query expressed in the * common syntax for minimum should match. Defaults to <tt>30%</tt>. * * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String) */ public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) { this.minimumShouldMatch = minimumShouldMatch; return this; } /** * Sets the boost factor to use when boosting terms. Defaults to <tt>1</tt>. */ public MoreLikeThisQueryBuilder boostTerms(float boostTerms) { this.boostTerms = boostTerms; return this; } /** * Whether to include the input documents. Defaults to <tt>false</tt> */ public MoreLikeThisQueryBuilder include(boolean include) { this.include = include; return this; } <<<<<<< ======= @Override public MoreLikeThisQueryBuilder boost(float boost) { this.boost = boost; return this; } /** * Sets the query name for the filter that can be used when searching for matched_filters per hit. */ public MoreLikeThisQueryBuilder queryName(String queryName) { this.queryName = queryName; return this; } /** * The text to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder likeText(String likeText) { return like(likeText); } @Deprecated public MoreLikeThisQueryBuilder ids(String... ids) { Item[] items = new Item[ids.length]; for (int i = 0; i < items.length; i++) { items[i] = new Item(null, null, ids[i]); } return like(items); } @Deprecated public MoreLikeThisQueryBuilder docs(Item... docs) { return like(docs); } /** * Sets the documents from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(Item...)} instead */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(Item... docs) { return unlike(docs); } /** * Sets the text from which the terms should not be selected from. 
* * @Deprecated Use {@link #unlike(String...)} instead. */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(String... likeText) { return unlike(likeText); } /** * Adds a document to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder addItem(Item... likeItems) { return addLikeItem(likeItems); } >>>>>>> /** * The text to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder likeText(String likeText) { return like(likeText); } @Deprecated public MoreLikeThisQueryBuilder ids(String... ids) { Item[] items = new Item[ids.length]; for (int i = 0; i < items.length; i++) { items[i] = new Item(null, null, ids[i]); } return like(items); } @Deprecated public MoreLikeThisQueryBuilder docs(Item... docs) { return like(docs); } /** * Sets the documents from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(Item...)} instead */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(Item... docs) { return unlike(docs); } /** * Sets the text from which the terms should not be selected from. * * @Deprecated Use {@link #unlike(String...)} instead. */ @Deprecated public MoreLikeThisQueryBuilder ignoreLike(String... likeText) { return unlike(likeText); } /** * Adds a document to use in order to find documents that are "like" this. */ @Deprecated public MoreLikeThisQueryBuilder addItem(Item... likeItems) { return addLikeItem(likeItems); } <<<<<<< String likeFieldName = MoreLikeThisQueryParser.Fields.LIKE.getPreferredName(); builder.startObject(NAME); ======= builder.startObject(MoreLikeThisQueryParser.NAME); >>>>>>> builder.startObject(NAME); <<<<<<< if (boostTerms != -1) { builder.field(MoreLikeThisQueryParser.Fields.BOOST_TERMS.getPreferredName(), boostTerms); } ======= if (stopWords != null && stopWords.length > 0) { builder.field(MoreLikeThisQueryParser.Field.STOP_WORDS.getPreferredName(), stopWords); } >>>>>>> if (stopWords != null && stopWords.length > 0) { builder.field(MoreLikeThisQueryParser.Field.STOP_WORDS.getPreferredName(), stopWords); } <<<<<<< @Override public String getWriteableName() { return NAME; } ======= private static void buildLikeField(XContentBuilder builder, String fieldName, List<String> texts, List<Item> items) throws IOException { builder.startArray(fieldName); for (String text : texts) { builder.value(text); } for (Item item : items) { builder.value(item); } builder.endArray(); } >>>>>>> private static void buildLikeField(XContentBuilder builder, String fieldName, List<String> texts, List<Item> items) throws IOException { builder.startArray(fieldName); for (String text : texts) { builder.value(text); } for (Item item : items) { builder.value(item); } builder.endArray(); } @Override public String getWriteableName() { return NAME; }
<<<<<<< import org.elasticsearch.search.lookup.SearchLookup; ======= import org.elasticsearch.search.lookup.SourceLookup; >>>>>>> import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SourceLookup; <<<<<<< import java.util.function.Supplier; ======= import java.util.List; import java.util.function.BiFunction; >>>>>>> import java.util.List; import java.util.function.Supplier;
<<<<<<< if (aggFactory != null) { throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" + aggFactory.type + "] and [" + fieldName + "]"); ======= if (factory != null) { throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" + factory.type + "] and [" + fieldName + "]", parser.getTokenLocation()); >>>>>>> if (aggFactory != null) { throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" + aggFactory.type + "] and [" + fieldName + "]", parser.getTokenLocation()); <<<<<<< Reducer.Parser reducerParser = reducer(fieldName); if (reducerParser == null) { throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]"); } else { reducerFactory = reducerParser.parse(aggregationName, parser, context); } } else { aggFactory = aggregatorParser.parse(aggregationName, parser, context); ======= throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]", parser.getTokenLocation()); >>>>>>> Reducer.Parser reducerParser = reducer(fieldName); if (reducerParser == null) { throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]", parser.getTokenLocation()); } else { reducerFactory = reducerParser.parse(aggregationName, parser, context); } } else { aggFactory = aggregatorParser.parse(aggregationName, parser, context); <<<<<<< if (aggFactory == null && reducerFactory == null) { throw new SearchParseException(context, "Missing definition for aggregation [" + aggregationName + "]"); } else if (aggFactory != null) { assert reducerFactory == null; if (metaData != null) { aggFactory.setMetaData(metaData); } ======= if (factory == null) { throw new SearchParseException(context, "Missing definition for aggregation [" + aggregationName + "]", parser.getTokenLocation()); } >>>>>>> if (aggFactory == null && reducerFactory == null) { throw new SearchParseException(context, "Missing definition for aggregation [" + aggregationName + "]", parser.getTokenLocation()); } else if (aggFactory != null) { assert reducerFactory == null; if (metaData != null) { aggFactory.setMetaData(metaData); } if (subFactories != null) { aggFactory.subFactories(subFactories); } if (level == 0) { aggFactory.validate(); } factories.addAggregator(aggFactory); } else { assert reducerFactory != null;
<<<<<<< return new ParsedDocument(uidField, versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate); ======= document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage return new ParsedDocument(versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate); >>>>>>> document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate); <<<<<<< } }, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5) ); try { config.setCreate(Lucene.indexExists(store.directory()) == false); } catch (IOException e) { throw new ElasticsearchException("can't find index?", e); } ======= }}, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5)); >>>>>>> }}, null, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5));
<<<<<<< import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; ======= import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; >>>>>>> import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
<<<<<<< ======= import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.regex.Regex; >>>>>>> import org.elasticsearch.common.ParsingException; <<<<<<< public MultiMatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException { ======= public Query parse(QueryParseContext parseContext) throws IOException, ParsingException { >>>>>>> public MultiMatchQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { <<<<<<< throw new QueryParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); ======= throw new ParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]"); >>>>>>> throw new ParsingException(parseContext, "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]"); <<<<<<< analyzer = parser.text(); ======= String analyzer = parser.text(); if (parseContext.analysisService().analyzer(analyzer) == null) { throw new ParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found"); } multiMatchQuery.setAnalyzer(analyzer); >>>>>>> analyzer = parser.text(); <<<<<<< operator = Operator.fromString(parser.text()); ======= String op = parser.text(); if ("or".equalsIgnoreCase(op)) { multiMatchQuery.setOccur(BooleanClause.Occur.SHOULD); } else if ("and".equalsIgnoreCase(op)) { multiMatchQuery.setOccur(BooleanClause.Occur.MUST); } else { throw new ParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not [" + op + "]"); } >>>>>>> operator = Operator.fromString(parser.text()); <<<<<<< if (fieldsBoosts.isEmpty()) { throw new QueryParsingException(parseContext, "No fields specified for multi_match query"); ======= if (fieldNameWithBoosts.isEmpty()) { throw new ParsingException(parseContext, "No fields specified for multi_match query"); >>>>>>> if (fieldsBoosts.isEmpty()) { throw new ParsingException(parseContext, "No fields specified for multi_match query");
<<<<<<< public IndexResponse(ShardId shardId, String type, String id, long seqNo, long version, boolean created) { super(shardId, type, id, seqNo, version); this.created = created; } /** * Returns true if the document was created, false if updated. */ public boolean isCreated() { return this.created; ======= public IndexResponse(ShardId shardId, String type, String id, long version, boolean created) { super(shardId, type, id, version, created ? Result.CREATED : Result.UPDATED); >>>>>>> public IndexResponse(ShardId shardId, String type, String id, long seqNo, long version, boolean created) { super(shardId, type, id, seqNo, version, created ? Result.CREATED : Result.UPDATED); <<<<<<< builder.append(",created=").append(created); builder.append(",seqNo=").append(getSeqNo()); builder.append(",shards=").append(getShardInfo()); ======= builder.append(",result=").append(getResult().getLowercase()); builder.append(",shards=").append(Strings.toString(getShardInfo(), true)); >>>>>>> builder.append(",result=").append(getResult().getLowercase()); builder.append(",seqNo=").append(getSeqNo()); builder.append(",shards=").append(Strings.toString(getShardInfo(), true));
<<<<<<< import org.elasticsearch.action.delete.DeleteRequest; ======= import org.elasticsearch.action.bulk.BulkRequest; >>>>>>> import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.bulk.BulkRequest; <<<<<<< static Request ping() { return new Request("HEAD", "/", Collections.emptyMap(), null); } ======= static Request update(UpdateRequest updateRequest) throws IOException { String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); Params parameters = Params.builder(); parameters.withRouting(updateRequest.routing()); parameters.withParent(updateRequest.parent()); parameters.withTimeout(updateRequest.timeout()); parameters.withRefreshPolicy(updateRequest.getRefreshPolicy()); parameters.withWaitForActiveShards(updateRequest.waitForActiveShards()); parameters.withDocAsUpsert(updateRequest.docAsUpsert()); parameters.withFetchSourceContext(updateRequest.fetchSource()); parameters.withRetryOnConflict(updateRequest.retryOnConflict()); parameters.withVersion(updateRequest.version()); parameters.withVersionType(updateRequest.versionType()); // The Java API allows update requests with different content types // set for the partial document and the upsert document. This client // only accepts update requests that have the same content types set // for both doc and upsert. XContentType xContentType = null; if (updateRequest.doc() != null) { xContentType = updateRequest.doc().getContentType(); } if (updateRequest.upsertRequest() != null) { XContentType upsertContentType = updateRequest.upsertRequest().getContentType(); if ((xContentType != null) && (xContentType != upsertContentType)) { throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" + " and upsert [" + upsertContentType + "] documents"); } else { xContentType = upsertContentType; } } if (xContentType == null) { xContentType = Requests.INDEX_CONTENT_TYPE; } BytesRef source = XContentHelper.toXContent(updateRequest, xContentType, false).toBytesRef(); HttpEntity entity = new ByteArrayEntity(source.bytes, source.offset, source.length, ContentType.create(xContentType.mediaType())); return new Request(HttpPost.METHOD_NAME, endpoint, parameters.getParams(), entity); } >>>>>>> static Request ping() { return new Request("HEAD", "/", Collections.emptyMap(), null); } static Request update(UpdateRequest updateRequest) throws IOException { String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); Params parameters = Params.builder(); parameters.withRouting(updateRequest.routing()); parameters.withParent(updateRequest.parent()); parameters.withTimeout(updateRequest.timeout()); parameters.withRefreshPolicy(updateRequest.getRefreshPolicy()); parameters.withWaitForActiveShards(updateRequest.waitForActiveShards()); parameters.withDocAsUpsert(updateRequest.docAsUpsert()); parameters.withFetchSourceContext(updateRequest.fetchSource()); parameters.withRetryOnConflict(updateRequest.retryOnConflict()); parameters.withVersion(updateRequest.version()); parameters.withVersionType(updateRequest.versionType()); // The Java API allows update requests with different content types // set for the partial document and the upsert document. This client // only accepts update requests that have the same content types set // for both doc and upsert. 
XContentType xContentType = null; if (updateRequest.doc() != null) { xContentType = updateRequest.doc().getContentType(); } if (updateRequest.upsertRequest() != null) { XContentType upsertContentType = updateRequest.upsertRequest().getContentType(); if ((xContentType != null) && (xContentType != upsertContentType)) { throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" + " and upsert [" + upsertContentType + "] documents"); } else { xContentType = upsertContentType; } } if (xContentType == null) { xContentType = Requests.INDEX_CONTENT_TYPE; } BytesRef source = XContentHelper.toXContent(updateRequest, xContentType, false).toBytesRef(); HttpEntity entity = new ByteArrayEntity(source.bytes, source.offset, source.length, ContentType.create(xContentType.mediaType())); return new Request(HttpPost.METHOD_NAME, endpoint, parameters.getParams(), entity); }
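Editor's note: the substantive logic carried into the resolution above is the doc/upsert content-type reconciliation. A standalone sketch of that decision rule under simplified types; XContentType here is a stand-in enum and the JSON default is an assumption mirroring Requests.INDEX_CONTENT_TYPE, not the actual Elasticsearch classes:

// Standalone sketch of the doc/upsert content-type reconciliation in the
// resolved Request.update(...) above. The rule follows the resolution:
// reject mismatches, otherwise fall back to a default when neither side sets one.
final class ContentTypeReconciliation {

    enum XContentType { JSON, SMILE }

    static final XContentType DEFAULT_TYPE = XContentType.JSON; // assumed default

    static XContentType reconcile(XContentType docType, XContentType upsertType) {
        XContentType resolved = docType;
        if (upsertType != null) {
            if (resolved != null && resolved != upsertType) {
                throw new IllegalStateException("Update request cannot have different content types for doc ["
                        + resolved + "] and upsert [" + upsertType + "] documents");
            }
            resolved = upsertType;
        }
        return resolved == null ? DEFAULT_TYPE : resolved;
    }

    public static void main(String[] args) {
        System.out.println(reconcile(null, XContentType.SMILE)); // SMILE
        System.out.println(reconcile(null, null));               // JSON (default)
    }
}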
<<<<<<< ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Version.CURRENT); DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Version.CURRENT); ======= >>>>>>> <<<<<<< ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); final IndexMetaData indexMetaData = metaData.index("test"); ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1)) .build(); final IndexMetaData indexMetaData = metaData.index("test"); ShardRouting test_0 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 0, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_1 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 1, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_2 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 2, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_3 = ShardRouting.newUnassigned(indexMetaData.getIndex(), 3, null, 1, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_0 = ShardRouting.newUnassigned("test", 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= final Index index = new Index("test", "_na_"); ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> final Index index = new Index("test", "_na_"); ShardRouting test_0 = ShardRouting.newUnassigned(index, 0, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_1 = ShardRouting.newUnassigned("test", 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> 
ShardRouting test_1 = ShardRouting.newUnassigned(index, 1, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_2 = ShardRouting.newUnassigned("test", 2, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_2 = ShardRouting.newUnassigned(index, 2, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting test_3 = ShardRouting.newUnassigned("test", 3, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting test_3 = ShardRouting.newUnassigned(index, 3, null, 1, false, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); <<<<<<< ShardRouting other_0 = ShardRouting.newUnassigned("other", 0, null, 1, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); ======= ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); >>>>>>> ShardRouting other_0 = ShardRouting.newUnassigned(new Index("other", "_NA_"), 0, null, 1, randomBoolean(), new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
<<<<<<< Map<String, String> options = (Map)in.readMap(); this.options = options; ======= Map<String, String> options = (Map<String, String>)(Map)in.readMap(); if (this.type != ScriptType.INLINE && options.isEmpty()) { this.options = null; } else { this.options = options; } >>>>>>> Map<String, String> options = (Map)in.readMap(); if (this.type != ScriptType.INLINE && options.isEmpty()) { this.options = null; } else { this.options = options; } <<<<<<< Map<String, Object> options = (Map)this.options; out.writeMap(options); ======= Map<String, Object> options = (Map<String, Object>)(Map)this.options; if (options == null) { out.writeMap(new HashMap<>()); } else { out.writeMap(options); } >>>>>>> Map<String, Object> options = (Map)this.options; if (options == null) { out.writeMap(new HashMap<>()); } else { out.writeMap(options); }
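Editor's note: the resolution above settles on a small serialization protocol for script options: null is written as an empty map, and an empty map is read back as null unless the script type is INLINE. A minimal sketch of that normalization; ScriptType and the helper names are illustrative, not from the source:

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the null/empty normalization used in the resolved
// Script serialization above. Only the normalization rule mirrors the resolution.
final class ScriptOptionsSketch {

    enum ScriptType { INLINE, STORED }

    // On write: a null options map is serialized as an empty map.
    static Map<String, String> normalizeOnWrite(Map<String, String> options) {
        return options == null ? new HashMap<>() : options;
    }

    // On read: an empty map is restored to null unless the script is INLINE,
    // keeping the wire format stable while preserving the in-memory invariant.
    static Map<String, String> normalizeOnRead(ScriptType type, Map<String, String> options) {
        return (type != ScriptType.INLINE && options.isEmpty()) ? null : options;
    }

    public static void main(String[] args) {
        Map<String, String> wire = normalizeOnWrite(null);
        System.out.println(normalizeOnRead(ScriptType.STORED, wire)); // null
        System.out.println(normalizeOnRead(ScriptType.INLINE, wire)); // {}
    }
}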
<<<<<<< import com.google.common.collect.Lists; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; ======= >>>>>>> import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; <<<<<<< private final ArrayList<QueryBuilder> filters = Lists.newArrayList(); ======= private ArrayList<QueryBuilder> filters = new ArrayList<>(); >>>>>>> private final ArrayList<QueryBuilder> filters = new ArrayList<>();
<<<<<<< public static String getItemIdentifier(ItemStack itemStack) { String id = ServerUtil.getItemName(itemStack.getItem()); if (itemStack.getItemDamage() == 0 || itemStack.getItemDamage() == 32767) return id; else return id + ":" + itemStack.getItemDamage(); } ======= >>>>>>> <<<<<<< String id = ServerUtil.getItemName(item); config.get(CATEGORY_ITEM, id, DEFAULT_ITEM_PRICE).set(entry.getValue().getInt(DEFAULT_ITEM_PRICE));; ======= String id = GameData.getItemRegistry().getNameForObject(item); config.get(CATEGORY_ITEM, id, DEFAULT_ITEM_PRICE).set(entry.getValue().getInt(DEFAULT_ITEM_PRICE)); >>>>>>> String id = ServerUtil.getItemName(item); config.get(CATEGORY_ITEM, id, DEFAULT_ITEM_PRICE).set(entry.getValue().getInt(DEFAULT_ITEM_PRICE));
<<<<<<< import org.elasticsearch.index.NodeServicesProvider; import org.elasticsearch.index.seqno.GlobalCheckpointService; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; ======= >>>>>>> import org.elasticsearch.index.seqno.GlobalCheckpointService; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; <<<<<<< private final NodeServicesProvider nodeServicesProvider; private final Consumer<ShardId> globalCheckpointSyncer; ======= >>>>>>> private final Consumer<ShardId> globalCheckpointSyncer; <<<<<<< PeerRecoverySourceService peerRecoverySourceService, NodeServicesProvider nodeServicesProvider, GlobalCheckpointSyncAction globalCheckpointSyncAction) { this(settings, indicesService, ======= PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService) { this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService, >>>>>>> PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService, GlobalCheckpointSyncAction globalCheckpointSyncAction) { this(settings, (AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>>) indicesService, <<<<<<< nodeServicesProvider, globalCheckpointSyncAction::updateCheckpointForShard); ======= snapshotShardsService); >>>>>>> snapshotShardsService, globalCheckpointSyncAction::updateCheckpointForShard); <<<<<<< PeerRecoverySourceService peerRecoverySourceService, NodeServicesProvider nodeServicesProvider, Consumer<ShardId> globalCheckpointSyncer) { ======= PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService) { >>>>>>> PeerRecoverySourceService peerRecoverySourceService, SnapshotShardsService snapshotShardsService, Consumer<ShardId> globalCheckpointSyncer) { <<<<<<< this.nodeServicesProvider = nodeServicesProvider; this.globalCheckpointSyncer = globalCheckpointSyncer; ======= >>>>>>> this.globalCheckpointSyncer = globalCheckpointSyncer; <<<<<<< indexService = indicesService.createIndex(nodeServicesProvider, indexMetaData, buildInIndexListener, globalCheckpointSyncer); ======= indexService = indicesService.createIndex(indexMetaData, buildInIndexListener); >>>>>>> indexService = indicesService.createIndex(indexMetaData, buildInIndexListener, globalCheckpointSyncer); <<<<<<< U createIndex(NodeServicesProvider nodeServicesProvider, IndexMetaData indexMetaData, List<IndexEventListener> builtInIndexListener, Consumer<ShardId> globalCheckpointSyncer) throws IOException; ======= U createIndex(IndexMetaData indexMetaData, List<IndexEventListener> builtInIndexListener) throws IOException; >>>>>>> U createIndex(IndexMetaData indexMetaData, List<IndexEventListener> builtInIndexListener, Consumer<ShardId> globalCheckpointSyncer) throws IOException;
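Editor's note: the resolution above narrows the dependency from a whole service object to a Consumer&lt;ShardId&gt; callback, bound with the method reference globalCheckpointSyncAction::updateCheckpointForShard. A simplified sketch of that shape; ShardId and the classes below are stand-ins for the Elasticsearch types:

import java.util.Objects;
import java.util.function.Consumer;

// Stand-in types; only the callback-injection shape mirrors the resolution above.
final class CheckpointSyncSketch {

    static final class ShardId {
        final String index;
        final int id;
        ShardId(String index, int id) { this.index = index; this.id = id; }
        @Override public String toString() { return "[" + index + "][" + id + "]"; }
    }

    static final class GlobalCheckpointSyncAction {
        void updateCheckpointForShard(ShardId shardId) {
            System.out.println("syncing global checkpoint for " + shardId);
        }
    }

    static final class IndicesClusterStateService {
        private final Consumer<ShardId> globalCheckpointSyncer;
        IndicesClusterStateService(Consumer<ShardId> globalCheckpointSyncer) {
            this.globalCheckpointSyncer = Objects.requireNonNull(globalCheckpointSyncer);
        }
        void syncShard(ShardId shardId) {
            globalCheckpointSyncer.accept(shardId); // narrow callback, trivial to stub in tests
        }
    }

    public static void main(String[] args) {
        GlobalCheckpointSyncAction action = new GlobalCheckpointSyncAction();
        IndicesClusterStateService service =
                new IndicesClusterStateService(action::updateCheckpointForShard);
        service.syncShard(new ShardId("test", 0));
    }
}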
<<<<<<< public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { ======= public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, Map<String, Object> metaData) throws IOException { >>>>>>> public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { <<<<<<< public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggFactory, reducers, metaData); ======= public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, Map<String, Object> metaData) throws IOException { super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggFactory, metaData); >>>>>>> public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, AggregationContext aggregationContext, Aggregator parent, SignificantTermsAggregatorFactory termsAggFactory, List<Reducer> reducers, Map<String, Object> metaData) throws IOException { super(name, factories, valuesSource, bucketCountThresholds, includeExclude, aggregationContext, parent, termsAggFactory, reducers, metaData);
<<<<<<< import org.elasticsearch.common.util.concurrent.ThreadContext; ======= import org.elasticsearch.env.Environment; >>>>>>> import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; <<<<<<< .put("path.home", createTempDir().toString()) .put("name", "test-" + getTestName()) ======= .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) >>>>>>> .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .put("name", "test-" + getTestName())
<<<<<<< result.setPrincipal( deserializePrincipal( principalData, manager ) ); ======= result.setPrincipalInternal( deserializePrincipal( principalData, realm ) ); >>>>>>> result.setPrincipalInternal( deserializePrincipal( principalData, manager ) );
<<<<<<< @Override public SessionAttributesTranscoder createTranscoder( final SessionManager manager ) { return getTranscoder( manager ); ======= public SessionAttributesTranscoder createTranscoder( final SessionManager manager ) { return getTranscoder( manager.getContainer().getLoader().getClassLoader() ); } protected SessionAttributesTranscoder createTranscoder( final ClassLoader loader ) { return getTranscoder( loader ); >>>>>>> @Override public SessionAttributesTranscoder createTranscoder( final SessionManager manager ) { return getTranscoder( manager.getContainer().getLoader().getClassLoader() ); } protected SessionAttributesTranscoder createTranscoder( final ClassLoader loader ) { return getTranscoder( loader ); <<<<<<< final String defaultSerializerFactory = getSysPropValue( PROP_ENV_DEFAULT_FACTORY, KryoTranscoder.DEFAULT_SERIALIZER_FACTORY_CLASS ); _transcoder = new KryoTranscoder( manager.getContainer().getLoader().getClassLoader(), _customConverterClassNames, _copyCollectionsForSerialization, initialBufferSize, maxBufferSize, defaultSerializerFactory ); ======= _transcoder = new KryoTranscoder( classLoader, _customConverterClassNames, _copyCollectionsForSerialization, initialBufferSize, maxBufferSize ); >>>>>>> final String defaultSerializerFactory = getSysPropValue( PROP_ENV_DEFAULT_FACTORY, KryoTranscoder.DEFAULT_SERIALIZER_FACTORY_CLASS ); _transcoder = new KryoTranscoder( classLoader, _customConverterClassNames, _copyCollectionsForSerialization, initialBufferSize, maxBufferSize, defaultSerializerFactory );
<<<<<<< public FormEntryPage clickForwardButton() { onView(withText(getTranslatedString(R.string.form_forward))).perform(click()); return this; } public FormEntryPage clickBackwardButton() { onView(withText(getTranslatedString(R.string.form_backward))).perform(click()); return this; } public FormEntryPage clickOnDoNotAddGroup() { clickOnString(R.string.add_repeat_no); return this; } ======= public FormEntryPage deleteGroup(String questionText) { onView(withText(questionText)).perform(longClick()); onView(withText(R.string.delete_repeat)).perform(click()); onView(withText(R.string.discard_group)).perform(click()); return this; } public FormEntryPage deleteGroup() { onView(withId(R.id.menu_delete_child)).perform(click()); onView(withText(R.string.delete_repeat)).perform(click()); return this; } public FormEntryPage clickGoUpIcon() { onView(withId(R.id.menu_go_up)).perform(click()); return this; } public FormEntryPage showSpinnerMultipleDialog() { onView(withText(getInstrumentation().getTargetContext().getString(R.string.select_answer))).perform(click()); return this; } public FormEntryPage clickJumpStartButton() { onView(withId(R.id.jumpBeginningButton)).perform(click()); return this; } >>>>>>> public FormEntryPage deleteGroup(String questionText) { onView(withText(questionText)).perform(longClick()); onView(withText(R.string.delete_repeat)).perform(click()); onView(withText(R.string.discard_group)).perform(click()); return this; } public FormEntryPage deleteGroup() { onView(withId(R.id.menu_delete_child)).perform(click()); onView(withText(R.string.delete_repeat)).perform(click()); return this; } public FormEntryPage clickGoUpIcon() { onView(withId(R.id.menu_go_up)).perform(click()); return this; } public FormEntryPage showSpinnerMultipleDialog() { onView(withText(getInstrumentation().getTargetContext().getString(R.string.select_answer))).perform(click()); return this; } public FormEntryPage clickJumpStartButton() { onView(withId(R.id.jumpBeginningButton)).perform(click()); return this; } public FormEntryPage clickForwardButton() { onView(withText(getTranslatedString(R.string.form_forward))).perform(click()); return this; } public FormEntryPage clickBackwardButton() { onView(withText(getTranslatedString(R.string.form_backward))).perform(click()); return this; } public FormEntryPage clickOnDoNotAddGroup() { clickOnString(R.string.add_repeat_no); return this; }
<<<<<<< import java.util.ArrayList; import java.util.List; ======= >>>>>>> import java.util.ArrayList; import java.util.List; <<<<<<< // Create a ".." entry so user can go back. formList.add(new HierarchyElement("..", getString(R.string.goto_previous_level), null, Color.WHITE, QUESTION, null)); mPath.setText(getString(R.string.form_path) + getCurrentPath()); ======= mPath.setVisibility(View.VISIBLE); mPath.setText(getCurrentPath()); jumpPreviousButton.setEnabled(true); >>>>>>> mPath.setVisibility(View.VISIBLE); mPath.setText(getCurrentPath()); jumpPreviousButton.setEnabled(true); <<<<<<< new HierarchyElement(fc.getLongText(), getString(R.string.collapsed_group), getResources() .getDrawable(R.drawable.expander_ic_minimized), Color.WHITE, COLLAPSED, fc.getIndex()); ======= // TODO: update group count new HierarchyElement(fc.getLongText(), null, getResources() .getDrawable(R.drawable.expander_ic_minimized), Color.WHITE, COLLAPSED, fc.getIndex()); >>>>>>> // TODO: update group count new HierarchyElement(fc.getLongText(), null, getResources() .getDrawable(R.drawable.expander_ic_minimized), Color.WHITE, COLLAPSED, fc.getIndex()); <<<<<<< + fc.getMultiplicity(), mIndent + getString(R.string.select_repeat) + fc.getLongText() + " " + fc.getMultiplicity(), null, Color.WHITE, CHILD, fc.getIndex())); ======= + (fc.getMultiplicity() + 1), null, null, Color.WHITE, CHILD, fc .getIndex())); >>>>>>> + (fc.getMultiplicity() + 1), null, null, Color.WHITE, CHILD, fc .getIndex()));
<<<<<<< import org.odk.collect.android.dao.helpers.InstancesDaoHelper; ======= import org.odk.collect.android.exception.JavaRosaException; import org.odk.collect.android.formentry.RequiresFormController; >>>>>>> import org.odk.collect.android.dao.helpers.InstancesDaoHelper; import org.odk.collect.android.exception.JavaRosaException; import org.odk.collect.android.formentry.RequiresFormController; <<<<<<< import java.io.File; import timber.log.Timber; ======= import java.util.HashMap; >>>>>>> import java.io.File; import timber.log.Timber; import java.util.HashMap;
<<<<<<< import java.io.File; ======= import org.javarosa.core.model.FormIndex; >>>>>>> import java.io.File; import org.javarosa.core.model.FormIndex; <<<<<<< mAudioButton = new AudioButton(getContext(), audioURI); mAudioButton.setId(QuestionWidget.newUniqueId()); // random ID to be used by the // relative layout. ======= mAudioButton = new AudioButton(getContext(), mIndex, audioURI); mAudioButton.setId(3245345); // random ID to be used by the relative layout. >>>>>>> mAudioButton = new AudioButton(getContext(), mIndex, audioURI); mAudioButton.setId(QuestionWidget.newUniqueId()); // random ID to be used by the // relative layout.
<<<<<<< mInstanceUploaderTask = (InstanceUploaderTask) getLastCustomNonConfigurationInstance(); if (mInstanceUploaderTask == null) { ======= instanceUploaderTask = (InstanceUploaderTask) getLastNonConfigurationInstance(); if (instanceUploaderTask == null) { >>>>>>> instanceUploaderTask = (InstanceUploaderTask) getLastCustomNonConfigurationInstance(); if (instanceUploaderTask == null) { <<<<<<< public Object onRetainCustomNonConfigurationInstance() { return mInstanceUploaderTask; ======= public Object onRetainNonConfigurationInstance() { return instanceUploaderTask; >>>>>>> public Object onRetainCustomNonConfigurationInstance() { return instanceUploaderTask;
<<<<<<< super.onCreate(savedInstanceState); mToolbar.setTitle(getString(R.string.get_forms)); mAlertMsg = getString(R.string.please_wait); ======= setTitle(getString(R.string.get_forms)); alertMsg = getString(R.string.please_wait); >>>>>>> super.onCreate(savedInstanceState); mToolbar.setTitle(getString(R.string.get_forms)); alertMsg = getString(R.string.please_wait); <<<<<<< public void onItemClick(AdapterView<?> parent, View view, int position, long id) { toggleButtonLabel(mToggleButton, getListView()); mDownloadButton.setEnabled(getListView().getCheckedItemCount() > 0); ======= protected void onListItemClick(ListView l, View v, int position, long id) { super.onListItemClick(l, v, position, id); toggleButtonLabel(toggleButton, getListView()); downloadButton.setEnabled(getListView().getCheckedItemCount() > 0); >>>>>>> public void onItemClick(AdapterView<?> parent, View view, int position, long id) { toggleButtonLabel(toggleButton, getListView()); downloadButton.setEnabled(getListView().getCheckedItemCount() > 0); <<<<<<< public Object onRetainCustomNonConfigurationInstance() { if (mDownloadFormsTask != null) { return mDownloadFormsTask; ======= public Object onRetainNonConfigurationInstance() { if (downloadFormsTask != null) { return downloadFormsTask; >>>>>>> public Object onRetainCustomNonConfigurationInstance() { if (downloadFormsTask != null) { return downloadFormsTask; <<<<<<< if (mFormList.size() == 0) { mFormList.add(item); emptyView.setVisibility(View.VISIBLE); ======= if (formList.size() == 0) { formList.add(item); >>>>>>> if (formList.size() == 0) { formList.add(item); emptyView.setVisibility(View.VISIBLE);
<<<<<<< new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { boolean first = true; selectionText.setText(""); for (int i = 0; i < selections.length; i++) { if (selections[i]) { if (first) { first = false; selectionText.setText(String.format(context.getString(R.string.selected_answer), answer_items[i].toString())); selectionText.setVisibility(View.VISIBLE); } else { selectionText.setText(String.format(context.getString(R.string.selected_answer_with_comma), selectionText.getText().toString(), answer_items[i].toString())); ======= new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { boolean first = true; selectionText.setText(""); for (int i = 0; i < selections.length; i++) { if (selections[i]) { if (first) { first = false; selectionText.setText( context.getString(R.string.selected) + answer_items[i].toString()); selectionText.setVisibility(View.VISIBLE); } else { selectionText.setText(selectionText.getText() + ", " + answer_items[i].toString()); } >>>>>>> new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { boolean first = true; selectionText.setText(""); for (int i = 0; i < selections.length; i++) { if (selections[i]) { if (first) { first = false; selectionText.setText(String.format(context.getString(R.string.selected_answer), answer_items[i].toString())); selectionText.setVisibility(View.VISIBLE); } else { selectionText.setText(String.format(context.getString(R.string.selected_answer_with_comma), selectionText.getText().toString(), answer_items[i].toString())); }
<<<<<<< import org.odk.collect.android.formentry.FormSaveViewModel; ======= import org.odk.collect.android.formentry.repeats.AddRepeatDialog; >>>>>>> import org.odk.collect.android.formentry.FormSaveViewModel; import org.odk.collect.android.formentry.repeats.AddRepeatDialog;
<<<<<<< setupStrictMode(); ======= initMapProviders(); >>>>>>> setupStrictMode(); initMapProviders(); <<<<<<< /** * Enable StrictMode and log violations to the system log. * This catches disk and network access on the main thread, as well as leaked SQLite * cursors and unclosed resources. */ private void setupStrictMode() { if (BuildConfig.DEBUG) { StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder() .detectAll() .permitDiskReads() // shared preferences are being read on main thread .penaltyLog() .build()); StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder() .detectAll() .penaltyLog() .build()); } } ======= private void initMapProviders() { new com.google.android.gms.maps.MapView(this).onCreate(null); MapboxUtils.initMapbox(); } >>>>>>> /** * Enable StrictMode and log violations to the system log. * This catches disk and network access on the main thread, as well as leaked SQLite * cursors and unclosed resources. */ private void setupStrictMode() { if (BuildConfig.DEBUG) { StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder() .detectAll() .permitDiskReads() // shared preferences are being read on main thread .penaltyLog() .build()); StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder() .detectAll() .penaltyLog() .build()); } } private void initMapProviders() { new com.google.android.gms.maps.MapView(this).onCreate(null); MapboxUtils.initMapbox(); }
<<<<<<< import org.odk.collect.android.geo.MapProvider; ======= import org.odk.collect.android.jobs.CollectJobCreator; >>>>>>> import org.odk.collect.android.geo.MapProvider; import org.odk.collect.android.jobs.CollectJobCreator; <<<<<<< @Provides @Singleton public MapProvider providesMapProvider() { return new MapProvider(); } ======= @Provides public StorageStateProvider providesStorageStateProvider() { return new StorageStateProvider(); } @Provides public StoragePathProvider providesStoragePathProvider() { return new StoragePathProvider(); } @Provides public AdminPasswordProvider providesAdminPasswordProvider() { return new AdminPasswordProvider(AdminSharedPreferences.getInstance()); } @Provides public CollectJobCreator providesCollectJobCreator() { return new CollectJobCreator(); } >>>>>>> @Provides @Singleton public MapProvider providesMapProvider() { return new MapProvider(); } @Provides public StorageStateProvider providesStorageStateProvider() { return new StorageStateProvider(); } @Provides public StoragePathProvider providesStoragePathProvider() { return new StoragePathProvider(); } @Provides public AdminPasswordProvider providesAdminPasswordProvider() { return new AdminPasswordProvider(AdminSharedPreferences.getInstance()); } @Provides public CollectJobCreator providesCollectJobCreator() { return new CollectJobCreator(); }
<<<<<<< toolbar.findViewById(R.id.menu_goto).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { FormController formController = Collect.getInstance() .getFormController(); Collect.getInstance() .getActivityLogger() .logInstanceAction(this, "onOptionsItemSelected", "MENU_HIERARCHY_VIEW"); if (formController.currentPromptIsQuestion()) { saveAnswersForCurrentScreen(DO_NOT_EVALUATE_CONSTRAINTS); } Intent i = new Intent(FormEntryActivity.this, FormHierarchyActivity.class); i.putExtra(ApplicationConstants.BundleKeys.FORM_MODE, ApplicationConstants.FormModes.EDIT_SAVED); startActivityForResult(i, HIERARCHY_ACTIVITY); } }); boolean hasHardwareMenu = ViewConfigurationCompat.hasPermanentMenuKey(ViewConfiguration.get(getApplicationContext())); if (!hasHardwareMenu) { setSupportActionBar(toolbar); } mFormsDao = new FormsDao(); ======= formsDao = new FormsDao(); >>>>>>> toolbar.findViewById(R.id.menu_goto).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { FormController formController = Collect.getInstance() .getFormController(); Collect.getInstance() .getActivityLogger() .logInstanceAction(this, "onOptionsItemSelected", "MENU_HIERARCHY_VIEW"); if (formController.currentPromptIsQuestion()) { saveAnswersForCurrentScreen(DO_NOT_EVALUATE_CONSTRAINTS); } Intent i = new Intent(FormEntryActivity.this, FormHierarchyActivity.class); i.putExtra(ApplicationConstants.BundleKeys.FORM_MODE, ApplicationConstants.FormModes.EDIT_SAVED); startActivityForResult(i, HIERARCHY_ACTIVITY); } }); boolean hasHardwareMenu = ViewConfigurationCompat.hasPermanentMenuKey(ViewConfiguration.get(getApplicationContext())); if (!hasHardwareMenu) { setSupportActionBar(toolbar); } formsDao = new FormsDao(); <<<<<<< ((animation == mInAnimation) ? "in" : ((animation == mOutAnimation) ? "out" : "other"))); if (mInAnimation == animation) { mAnimationCompletionSet |= 1; } else if (mOutAnimation == animation) { mAnimationCompletionSet |= 2; ======= ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other"))); if (inAnimation == animation) { animationCompletionSet |= 1; } else if (outAnimation == animation) { animationCompletionSet |= 2; >>>>>>> ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other"))); if (inAnimation == animation) { animationCompletionSet |= 1; } else if (outAnimation == animation) { animationCompletionSet |= 2; <<<<<<< ((animation == mInAnimation) ? "in" : ((animation == mOutAnimation) ? "out" : "other"))); ======= ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other"))); >>>>>>> ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other"))); <<<<<<< ((animation == mInAnimation) ? "in" : ((animation == mOutAnimation) ? "out" : "other"))); ======= ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other"))); >>>>>>> ((animation == inAnimation) ? "in" : ((animation == outAnimation) ? "out" : "other")));
<<<<<<< return showSentAndUnsentChoices(); } private boolean showSentAndUnsentChoices() { /** ======= Collect.getInstance().getActivityLogger().logAction(this, "toggleButton.longClick", Boolean.toString(!mShowUnsent)); /** >>>>>>> Collect.getInstance().getActivityLogger().logAction(this, "toggleButton.longClick", Boolean.toString(!mShowUnsent)); return showSentAndUnsentChoices(); } private boolean showSentAndUnsentChoices() { /**
<<<<<<< marker.setIcon(ContextCompat.getDrawable(getApplicationContext(), R.drawable.ic_place_black)); ======= marker.setIcon(ContextCompat.getDrawable(this, R.drawable.ic_place)); >>>>>>> marker.setIcon(ContextCompat.getDrawable(this, R.drawable.ic_place_black)); <<<<<<< marker.setIcon(ContextCompat.getDrawable(getApplicationContext(), R.drawable.ic_place_black)); ======= marker.setIcon(ContextCompat.getDrawable(GeoShapeOsmMapActivity.this, R.drawable.ic_place)); >>>>>>> marker.setIcon(ContextCompat.getDrawable(GeoShapeOsmMapActivity.this, R.drawable.ic_place_black));
<<<<<<< import org.odk.collect.android.preferences.qr.QRCodeTabsActivity; import org.odk.collect.android.preferences.qr.ShowQRCodeFragment; ======= >>>>>>> import org.odk.collect.android.preferences.qr.QRCodeTabsActivity; import org.odk.collect.android.preferences.qr.ShowQRCodeFragment;
<<<<<<< public class BearingActivity extends AppCompatActivity implements SensorEventListener { private ProgressDialog mBearingDialog; ======= public class BearingActivity extends Activity implements SensorEventListener { private ProgressDialog bearingDialog; >>>>>>> public class BearingActivity extends AppCompatActivity implements SensorEventListener { private ProgressDialog bearingDialog;
<<<<<<< mToolbar.setTitle(getString(R.string.review_data)); mEditMode = true; mSortingOptions = new String[]{ ======= setTitle(getString(R.string.review_data)); editMode = true; sortingOptions = new String[]{ >>>>>>> mToolbar.setTitle(getString(R.string.review_data)); editMode = true; sortingOptions = new String[]{ <<<<<<< mToolbar.setTitle(getString(R.string.view_sent_forms)); mSortingOptions = new String[]{ ======= setTitle(getString(R.string.view_sent_forms)); sortingOptions = new String[]{ >>>>>>> mToolbar.setTitle(getString(R.string.view_sent_forms)); sortingOptions = new String[]{
<<<<<<< import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.servlet.http.HttpServletRequest; ======= >>>>>>> import java.util.HashMap; import java.util.Map; <<<<<<< * Regular expression which matches any IPv4 address. */ private static final String IPV4_ADDRESS_REGEX = "([0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3})"; /** * Regular expression which matches any IPv6 address. */ private static final String IPV6_ADDRESS_REGEX = "([0-9a-fA-F]*(:[0-9a-fA-F]*){0,7})"; /** * Regular expression which matches any IP address, regardless of version. */ private static final String IP_ADDRESS_REGEX = "(" + IPV4_ADDRESS_REGEX + "|" + IPV6_ADDRESS_REGEX + ")"; /** * Pattern which matches valid values of the de-facto standard * "X-Forwarded-For" header. */ private static final Pattern X_FORWARDED_FOR = Pattern.compile("^" + IP_ADDRESS_REGEX + "(, " + IP_ADDRESS_REGEX + ")*$"); /** * Arbitrary attributes associated with this RemoteAuthenticatedUser object. */ private Map<String, String> attributes = new HashMap<String, String>(); @Override public Map<String, String> getAttributes() { return attributes; } @Override public void setAttributes(Map<String, String> attributes) { this.attributes = attributes; } /** * Derives the remote host of the authenticating user from the given * credentials object. The remote host is derived from X-Forwarded-For * in addition to the actual source IP of the request, and thus is not * trusted. The derived remote host is really only useful for logging, * unless the server is configured such that X-Forwarded-For is guaranteed * to be trustworthy. * * @param credentials * The credentials to derive the remote host from. * * @return * The remote host from which the user with the given credentials is * authenticating. */ private static String getRemoteHost(Credentials credentials) { HttpServletRequest request = credentials.getRequest(); // Use X-Forwarded-For, if present and valid String header = request.getHeader("X-Forwarded-For"); if (header != null) { Matcher matcher = X_FORWARDED_FOR.matcher(header); if (matcher.matches()) return matcher.group(1); } // If header absent or invalid, just use source IP return request.getRemoteAddr(); } /** ======= >>>>>>> * Arbitrary attributes associated with this RemoteAuthenticatedUser object. */ private Map<String, String> attributes = new HashMap<String, String>(); @Override public Map<String, String> getAttributes() { return attributes; } @Override public void setAttributes(Map<String, String> attributes) { this.attributes = attributes; } /**
<<<<<<< import org.apache.guacamole.auth.ldap.user.LDAPAuthenticatedUser; ======= import org.apache.guacamole.auth.ldap.ObjectQueryService; import org.apache.guacamole.auth.ldap.group.UserGroupService; >>>>>>> import org.apache.guacamole.auth.ldap.ObjectQueryService; import org.apache.guacamole.auth.ldap.group.UserGroupService; import org.apache.guacamole.auth.ldap.user.LDAPAuthenticatedUser; <<<<<<< // Produce connections for each readable configuration Map<String, Connection> connections = new HashMap<String, Connection>(); while (results.hasMore()) { try { LDAPEntry entry = results.next(); ======= // Build token filter containing credential tokens TokenFilter tokenFilter = new TokenFilter(); StandardTokens.addStandardTokens(tokenFilter, user); // Return a map of all readable connections return queryService.asMap(results, (entry) -> { >>>>>>> // Return a map of all readable connections return queryService.asMap(results, (entry) -> { <<<<<<< // If group base DN is specified search for user groups String groupBaseDN = confService.getGroupBaseDN(); if (groupBaseDN != null) { // Get all groups the user is a member of starting at the groupBaseDN, excluding guacConfigGroups LDAPSearchResults userRoleGroupResults = ldapConnection.search( groupBaseDN, LDAPConnection.SCOPE_SUB, "(&(!(objectClass=guacConfigGroup))(" + escapingService.escapeLDAPSearchFilter( confService.getMemberAttribute()) + "=" + escapingService.escapeLDAPSearchFilter(userDN) + "))", null, false, confService.getLDAPSearchConstraints() ); // Append the additional user groups to the LDAP filter // Now the filter will also look for guacConfigGroups that refer // to groups the user is a member of // The guacConfig group uses the seeAlso attribute to refer // to these other groups while (userRoleGroupResults.hasMore()) { try { LDAPEntry entry = userRoleGroupResults.next(); connectionSearchFilter.append("(seeAlso=").append(escapingService.escapeLDAPSearchFilter(entry.getDN())).append(")"); } catch (LDAPReferralException e) { if (confService.getFollowReferrals()) { logger.error("Could not follow referral: {}", e.getFailedReferral()); logger.debug("Error encountered trying to follow referral.", e); throw new GuacamoleServerException("Could not follow LDAP referral.", e); } else { logger.warn("Given a referral, but referrals are disabled. Error was: {}", e.getMessage()); logger.debug("Got a referral, but configured to not follow them.", e); } } } ======= // Additionally filter by group membership if the current user is a // member of any user groups List<LDAPEntry> userGroups = userGroupService.getParentUserGroupEntries(ldapConnection, userDN); if (!userGroups.isEmpty()) { for (LDAPEntry entry : userGroups) connectionSearchFilter.append("(seeAlso=").append(escapingService.escapeLDAPSearchFilter(entry.getDN())).append(")"); >>>>>>> // Additionally filter by group membership if the current user is a // member of any user groups List<LDAPEntry> userGroups = userGroupService.getParentUserGroupEntries(ldapConnection, userDN); if (!userGroups.isEmpty()) { for (LDAPEntry entry : userGroups) connectionSearchFilter.append("(seeAlso=").append(escapingService.escapeLDAPSearchFilter(entry.getDN())).append(")");
<<<<<<< import java.util.HashMap; import java.util.Map; ======= import java.util.Collections; import java.util.Set; >>>>>>> import java.util.HashMap; import java.util.Map; import java.util.Collections; import java.util.Set; <<<<<<< * Arbitrary attributes associated with this RemoteAuthenticatedUser object. */ private Map<String, String> attributes = new HashMap<String, String>(); @Override public Map<String, String> getAttributes() { return attributes; } @Override public void setAttributes(Map<String, String> attributes) { this.attributes = attributes; } /** ======= * The identifiers of any groups of which this user is a member, including * groups inherited through membership in other groups. */ private final Set<String> effectiveGroups; /** >>>>>>> * Arbitrary attributes associated with this RemoteAuthenticatedUser object. */ private Map<String, String> attributes = new HashMap<String, String>(); /** * The identifiers of any groups of which this user is a member, including * groups inherited through membership in other groups. */ private final Set<String> effectiveGroups; @Override public Map<String, String> getAttributes() { return attributes; } @Override public void setAttributes(Map<String, String> attributes) { this.attributes = attributes; } /**
<<<<<<< import io.reactivex.ObservableEmitter; ======= import io.reactivex.Observable; import io.reactivex.disposables.Disposable; >>>>>>> import io.reactivex.Observable; import io.reactivex.ObservableEmitter; import io.reactivex.disposables.Disposable; <<<<<<< import java.util.LinkedList; import java.util.List; import java.util.Timer; import java.util.TimerTask; ======= import java.util.concurrent.TimeUnit; >>>>>>> import java.util.LinkedList; import java.util.List; import java.util.concurrent.TimeUnit; <<<<<<< pingPongTimer = new Timer("OkexPingPong", false); pingPongTimer.schedule(new TimerTask() { @Override public void run() { if (OkCoinStreamingService.this.isSocketOpen()) { OkCoinStreamingService.this.sendMessage("{\"event\":\"ping\"}"); } } }, 15 * 1000, 15 * 1000); ======= pingPongSubscription = pingPongSrc.subscribe(o -> { this.sendMessage("{\"event\":\"ping\"}"); }); >>>>>>> pingPongSubscription = pingPongSrc.subscribe(o -> { this.sendMessage("{\"event\":\"ping\"}"); }); <<<<<<< public void addDelayEmitter(ObservableEmitter<Long> delayEmitter) { delayEmitters.add(delayEmitter); } ======= @Override protected WebSocketClientHandler getWebSocketClientHandler(WebSocketClientHandshaker handshaker, WebSocketClientHandler.WebSocketMessageHandler handler) { return new OkCoinNettyWebSocketClientHandler(handshaker, handler); } protected class OkCoinNettyWebSocketClientHandler extends NettyWebSocketClientHandler { protected OkCoinNettyWebSocketClientHandler(WebSocketClientHandshaker handshaker, WebSocketMessageHandler handler) { super(handshaker, handler); } @Override public void channelInactive(ChannelHandlerContext ctx) { if (pingPongSubscription != null && !pingPongSubscription.isDisposed()) { pingPongSubscription.dispose(); } super.channelInactive(ctx); } } >>>>>>> public void addDelayEmitter(ObservableEmitter<Long> delayEmitter) { delayEmitters.add(delayEmitter); } @Override protected WebSocketClientHandler getWebSocketClientHandler(WebSocketClientHandshaker handshaker, WebSocketClientHandler.WebSocketMessageHandler handler) { return new OkCoinNettyWebSocketClientHandler(handshaker, handler); } protected class OkCoinNettyWebSocketClientHandler extends NettyWebSocketClientHandler { protected OkCoinNettyWebSocketClientHandler(WebSocketClientHandshaker handshaker, WebSocketMessageHandler handler) { super(handshaker, handler); } @Override public void channelInactive(ChannelHandlerContext ctx) { if (pingPongSubscription != null && !pingPongSubscription.isDisposed()) { pingPongSubscription.dispose(); } super.channelInactive(ctx); } }
<<<<<<< import io.reactivex.Completable; import io.reactivex.CompletableSource; ======= import io.reactivex.Emitter; import io.reactivex.ObservableEmitter; >>>>>>> import io.reactivex.Completable; import io.reactivex.CompletableSource; import io.reactivex.ObservableEmitter; <<<<<<< import java.util.Timer; import java.util.TimerTask; ======= import java.util.LinkedList; import java.util.List; >>>>>>> import java.util.LinkedList; import java.util.List; import java.util.Timer; import java.util.TimerTask; <<<<<<< private Timer pingPongTimer = null; ======= private List<ObservableEmitter<Long>> delayEmitters = new LinkedList<>(); >>>>>>> private Timer pingPongTimer = null; private List<ObservableEmitter<Long>> delayEmitters = new LinkedList<>(); <<<<<<< if (message.get("event") != null && "pong".equals(message.get("event").asText()) ) { // ignore pong message return; } if (message.get("data") != null) { if (message.get("data").has("result")) { boolean success = message.get("data").get("result").asBoolean(); ======= JsonNode data = message.get("data"); if (data != null) { if (data.has("result")) { boolean success = data.get("result").asBoolean(); >>>>>>> JsonNode data = message.get("data"); if (data != null) { if (data.has("result")) { boolean success = data.get("result").asBoolean(); <<<<<<< super.handleError(message, new ExchangeException("Error code: " + message.get("data").get("error_code").asText())); } else { super.handleMessage(message); ======= super.handleError(message, new ExchangeException("Error code: " + data.get("error_code").asText())); >>>>>>> super.handleError(message, new ExchangeException("Error code: " + data.get("error_code").asText()));
<<<<<<< import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import info.bitrich.xchangestream.bitmex.dto.*; ======= import info.bitrich.xchangestream.bitmex.dto.*; >>>>>>> import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import info.bitrich.xchangestream.bitmex.dto.*; <<<<<<< import org.knowm.xchange.bitmex.BitmexContract; import org.knowm.xchange.bitmex.BitmexPrompt; import org.knowm.xchange.bitmex.BitmexUtils; ======= import org.knowm.xchange.bitmex.BitmexExchange; import org.knowm.xchange.bitmex.BitmexPrompt; >>>>>>> import org.knowm.xchange.bitmex.BitmexExchange; import org.knowm.xchange.bitmex.BitmexPrompt; <<<<<<< import java.io.IOException; import java.util.*; ======= import java.util.*; >>>>>>> import java.io.IOException; import java.util.*; <<<<<<< this.streamingService.subscribeConnectionSuccess().subscribe(o -> { LOG.info("Bitmex connection succeeded. Clearing orderbooks."); orderbooks.clear(); }); } private String getBitmexSymbol(CurrencyPair currencyPair, Object... args) { if (args.length > 0) { BitmexPrompt prompt = (BitmexPrompt) args[0]; BitmexContract contract = new BitmexContract(currencyPair, prompt); return BitmexUtils.translateBitmexContract(contract); } else { return currencyPair.base.toString() + currencyPair.counter.toString(); } ======= this.bitmexExchange = bitmexExchange; } private String getBitmexSymbol(CurrencyPair currencyPair, Object... args) { if (args.length > 0 && args[0] != null) { BitmexPrompt prompt = (BitmexPrompt) args[0]; currencyPair = bitmexExchange.determineActiveContract(currencyPair.base.toString(), currencyPair.counter.toString(), prompt); } return currencyPair.base.toString() + currencyPair.counter.toString(); >>>>>>> this.streamingService.subscribeConnectionSuccess().subscribe(o -> { LOG.info("Bitmex connection succeeded. Clearing orderbooks."); orderbooks.clear(); }); this.bitmexExchange = bitmexExchange; } private String getBitmexSymbol(CurrencyPair currencyPair, Object... args) { if (args.length > 0 && args[0] != null) { BitmexPrompt prompt = (BitmexPrompt) args[0]; currencyPair = bitmexExchange.determineActiveContract(currencyPair.base.toString(), currencyPair.counter.toString(), prompt); } return currencyPair.base.toString() + currencyPair.counter.toString(); <<<<<<< return new OrderBook(null, Collections.emptyList(), Collections.emptyList()); ======= return new OrderBook(new Date(), Collections.emptyList(), Collections.emptyList()); >>>>>>> return new OrderBook(new Date(), Collections.emptyList(), Collections.emptyList()); <<<<<<< public Observable<BitmexExecution> getExecutions(String symbol) { return streamingService.subscribeBitmexChannel("execution:" + symbol).flatMapIterable(s -> { JsonNode executions = s.getData(); List<BitmexExecution> bitmexExecutions = new ArrayList<>(executions.size()); for (JsonNode execution : executions) { bitmexExecutions.add(objectMapper.treeToValue(execution, BitmexExecution.class)); } return bitmexExecutions; }); } public void enableDeadManSwitch() throws IOException { enableDeadManSwitch(BitmexStreamingService.DMS_RESUBSCRIBE, BitmexStreamingService.DMS_CANCEL_ALL_IN); } /** * @param rate in milliseconds to send updated * @param timeout milliseconds from now after which orders will be cancelled */ public void enableDeadManSwitch(long rate, long timeout) throws IOException { streamingService.enableDeadMansSwitch(rate, timeout); } public boolean isDeadManSwitchEnabled() throws IOException { return streamingService.isDeadMansSwitchEnabled(); } public void disableDeadMansSwitch() throws IOException { streamingService.disableDeadMansSwitch(); } ======= public Observable<BitmexFunding> getFunding() { String channelName = "funding"; return streamingService.subscribeBitmexChannel(channelName).map(BitmexWebSocketTransaction::toBitmexFunding); } >>>>>>> public Observable<BitmexExecution> getExecutions(String symbol) { return streamingService.subscribeBitmexChannel("execution:" + symbol).flatMapIterable(s -> { JsonNode executions = s.getData(); List<BitmexExecution> bitmexExecutions = new ArrayList<>(executions.size()); for (JsonNode execution : executions) { bitmexExecutions.add(objectMapper.treeToValue(execution, BitmexExecution.class)); } return bitmexExecutions; }); } public void enableDeadManSwitch() throws IOException { enableDeadManSwitch(BitmexStreamingService.DMS_RESUBSCRIBE, BitmexStreamingService.DMS_CANCEL_ALL_IN); } /** * @param rate in milliseconds to send updated * @param timeout milliseconds from now after which orders will be cancelled */ public void enableDeadManSwitch(long rate, long timeout) throws IOException { streamingService.enableDeadMansSwitch(rate, timeout); } public boolean isDeadManSwitchEnabled() throws IOException { return streamingService.isDeadMansSwitchEnabled(); } public void disableDeadMansSwitch() throws IOException { streamingService.disableDeadMansSwitch(); } public Observable<BitmexFunding> getFunding() { String channelName = "funding"; return streamingService.subscribeBitmexChannel(channelName).map(BitmexWebSocketTransaction::toBitmexFunding); }
<<<<<<< import io.netty.handler.codec.http.websocketx.extensions.WebSocketClientExtensionHandler; import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketClientCompressionHandler; ======= import com.fasterxml.jackson.core.JsonProcessingException; import io.reactivex.Completable; import io.reactivex.CompletableSource; import org.knowm.xchange.ExchangeSpecification; >>>>>>> import io.netty.handler.codec.http.websocketx.extensions.WebSocketClientExtensionHandler; import com.fasterxml.jackson.core.JsonProcessingException; import io.reactivex.Completable; import io.reactivex.CompletableSource; import org.knowm.xchange.ExchangeSpecification;
<<<<<<< import info.bitrich.xchangestream.service.ConnectableService; import info.bitrich.xchangestream.service.netty.NettyStreamingService; ======= import io.netty.channel.ChannelHandlerContext; >>>>>>> import info.bitrich.xchangestream.service.ConnectableService; import info.bitrich.xchangestream.service.netty.NettyStreamingService; import io.netty.channel.ChannelHandlerContext; <<<<<<< import org.knowm.xchange.exceptions.NotYetImplementedForExchangeException; import org.knowm.xchange.ExchangeSpecification; ======= import org.knowm.xchange.exceptions.NotYetImplementedForExchangeException; >>>>>>> import org.knowm.xchange.ExchangeSpecification; import org.knowm.xchange.exceptions.NotYetImplementedForExchangeException; <<<<<<< * Observable for reconnection failure event. * When this happens, it usually indicates that the server or the network is down. * * @return Observable with the exception during reconnection. */ default Observable<Throwable> reconnectFailure() { throw new NotYetImplementedForExchangeException(); } /** * Observable for connection success event. * When this happens, it usually indicates that the server or the network is down. * * @return Observable with the exception during reconnection. */ default Observable<Object> connectionSuccess() { throw new NotYetImplementedForExchangeException(); } /** ======= * Observable for disconnection event. * * @return Observable with the exception during reconnection. */ default Observable<ChannelHandlerContext> disconnectObservable() { throw new NotYetImplementedForExchangeException(); } /** * Observable for reconnection failure event. * When this happens, it usually indicates that the server or the network is down. * * @return Observable with the exception during reconnection. */ default Observable<Throwable> reconnectFailure() { throw new NotYetImplementedForExchangeException(); } /** * Observable for message delay measure. * Every time when the client received a message with a timestamp, the delay time is calculated and pushed to subscribers. * * @return Observable with the message delay measure. */ default Observable<Long> messageDelay() { throw new NotYetImplementedForExchangeException(); } default void resubscribeChannels() { throw new NotYetImplementedForExchangeException(); } /** >>>>>>> * Observable for reconnection failure event. * When this happens, it usually indicates that the server or the network is down. * * @return Observable with the exception during reconnection. */ default Observable<Throwable> reconnectFailure() { throw new NotYetImplementedForExchangeException(); } /** * Observable for connection success event. * When this happens, it usually indicates that the server or the network is down. * * @return Observable with the exception during reconnection. */ default Observable<Object> connectionSuccess() { throw new NotYetImplementedForExchangeException(); } /** * Observable for disconnection event. * * @return Observable with the exception during reconnection. */ default Observable<ChannelHandlerContext> disconnectObservable() { throw new NotYetImplementedForExchangeException(); } /** * Observable for message delay measure. * Every time when the client received a message with a timestamp, the delay time is calculated and pushed to subscribers. * * @return Observable with the message delay measure. */ default Observable<Long> messageDelay() { throw new NotYetImplementedForExchangeException(); } default void resubscribeChannels() { throw new NotYetImplementedForExchangeException(); } /**
<<<<<<< GDAXWebSocketSubscriptionMessage subscribeMessage = new GDAXWebSocketSubscriptionMessage(SUBSCRIBE, product); ======= GDAXWebSocketSubscriptionMessage subscribeMessage = new GDAXWebSocketSubscriptionMessage(SUBSCRIBE, product, authData.get()); ObjectMapper objectMapper = new ObjectMapper(); >>>>>>> GDAXWebSocketSubscriptionMessage subscribeMessage = new GDAXWebSocketSubscriptionMessage(SUBSCRIBE, product, authData.get()); <<<<<<< new GDAXWebSocketSubscriptionMessage(UNSUBSCRIBE, new String[]{"level2", "matches", "ticker"}); ======= new GDAXWebSocketSubscriptionMessage(UNSUBSCRIBE, new String[]{"level2", "matches", "ticker"}, authData.get()); ObjectMapper objectMapper = new ObjectMapper(); >>>>>>> new GDAXWebSocketSubscriptionMessage(UNSUBSCRIBE, new String[]{"level2", "matches", "ticker"}, authData.get());
<<<<<<< ======= import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; >>>>>>> import com.fasterxml.jackson.core.JsonProcessingException; <<<<<<< import info.bitrich.xchangestream.bitmex.dto.BitmexMarketDataEvent; ======= import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeType; >>>>>>> import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeType; import info.bitrich.xchangestream.bitmex.dto.BitmexMarketDataEvent; <<<<<<< import io.netty.handler.codec.http.DefaultHttpHeaders; import io.netty.handler.codec.http.websocketx.extensions.WebSocketClientExtensionHandler; ======= import io.netty.handler.codec.http.websocketx.extensions.WebSocketClientExtensionHandler; import io.reactivex.Completable; import io.reactivex.CompletableSource; >>>>>>> import io.netty.handler.codec.http.DefaultHttpHeaders; import io.netty.handler.codec.http.websocketx.extensions.WebSocketClientExtensionHandler; import io.reactivex.Completable; import io.reactivex.CompletableSource; <<<<<<< import io.reactivex.disposables.Disposable; import io.reactivex.schedulers.Schedulers; import org.knowm.xchange.bitmex.service.BitmexDigest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.ZoneOffset; import java.util.TimeZone; import java.util.concurrent.TimeUnit; ======= import io.reactivex.ObservableEmitter; import org.knowm.xchange.ExchangeSpecification; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; >>>>>>> import io.reactivex.ObservableEmitter; import io.reactivex.disposables.Disposable; import io.reactivex.schedulers.Schedulers; import org.knowm.xchange.ExchangeSpecification; import org.knowm.xchange.bitmex.service.BitmexDigest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.ZoneOffset; import java.util.*; import java.util.concurrent.TimeUnit; <<<<<<< ======= private final ObjectMapper mapper = new ObjectMapper(); private List<ObservableEmitter<Long>> delayEmitters = new LinkedList<>(); protected ExchangeSpecification exchangeSpecification; >>>>>>> private final ObjectMapper mapper = new ObjectMapper(); private List<ObservableEmitter<Long>> delayEmitters = new LinkedList<>(); protected ExchangeSpecification exchangeSpecification; <<<<<<< if (message.has("info") || message.has("success")) { return; } if (message.has("error")) { String error = message.get("error").asText(); LOG.error("Error with message: " + error); return; } if (message.has("now") && message.has("cancelTime")) { handleDeadMansSwitchMessage(message); return; ======= if (!delayEmitters.isEmpty() && message.has("data")) { String table = ""; if (message.has("table")) { table = message.get("table").asText(); } JsonNode data = message.get("data"); if (data.getNodeType().equals(JsonNodeType.ARRAY)) { Long current = System.currentTimeMillis(); SimpleDateFormat formatter; formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); JsonNode d = data.get(0); if (d != null && d.has("timestamp") && (!"order".equals(table) || d.has("ordStatus") && "NEW".equals(d.get("ordStatus").asText()))) { try { String timestamp = d.get("timestamp").asText(); Date date = formatter.parse(timestamp); long delay = current - date.getTime(); for (ObservableEmitter<Long> emitter : delayEmitters) { emitter.onNext(delay); } } catch (ParseException e) { LOG.error("Parsing timestamp error: ", e); } } } } if (message.has("info") || message.has("success")) { return; } if (message.has("error")) { String error = message.get("error").asText(); LOG.error("Error with message: " + error); return; } >>>>>>> if (!delayEmitters.isEmpty() && message.has("data")) { String table = ""; if (message.has("table")) { table = message.get("table").asText(); } JsonNode data = message.get("data"); if (data.getNodeType().equals(JsonNodeType.ARRAY)) { Long current = System.currentTimeMillis(); SimpleDateFormat formatter; formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); formatter.setTimeZone(TimeZone.getTimeZone("UTC")); JsonNode d = data.get(0); if (d != null && d.has("timestamp") && (!"order".equals(table) || d.has("ordStatus") && "NEW".equals(d.get("ordStatus").asText()))) { try { String timestamp = d.get("timestamp").asText(); Date date = formatter.parse(timestamp); long delay = current - date.getTime(); for (ObservableEmitter<Long> emitter : delayEmitters) { emitter.onNext(delay); } } catch (ParseException e) { LOG.error("Parsing timestamp error: ", e); } } } } if (message.has("info") || message.has("success")) { return; } if (message.has("error")) { String error = message.get("error").asText(); LOG.error("Error with message: " + error); return; } if (message.has("now") && message.has("cancelTime")) { handleDeadMansSwitchMessage(message); return; <<<<<<< private void handleDeadMansSwitchMessage(JsonNode message) { //handle dead man's switch confirmation try { String cancelTime = message.get("cancelTime").asText(); if (cancelTime.equals("0")) { LOG.info("Dead man's switch disabled"); dmsDisposable.dispose(); dmsDisposable = null; dmsCancelTime=0; } else { SimpleDateFormat sdf = new SimpleDateFormat(BitmexMarketDataEvent.BITMEX_TIMESTAMP_FORMAT); sdf.setTimeZone(TimeZone.getTimeZone(ZoneOffset.UTC)); long now = sdf.parse(message.get("now").asText()).getTime(); dmsCancelTime = sdf.parse(cancelTime).getTime(); } } catch (ParseException e) { LOG.error("Error parsing deadman's confirmation "); } return; } @Override protected WebSocketClientExtensionHandler getWebSocketClientExtensionHandler() { return null; } ======= >>>>>>> private void handleDeadMansSwitchMessage(JsonNode message) { //handle dead man's switch confirmation try { String cancelTime = message.get("cancelTime").asText(); if (cancelTime.equals("0")) { LOG.info("Dead man's switch disabled"); dmsDisposable.dispose(); dmsDisposable = null; dmsCancelTime=0; } else { SimpleDateFormat sdf = new SimpleDateFormat(BitmexMarketDataEvent.BITMEX_TIMESTAMP_FORMAT); sdf.setTimeZone(TimeZone.getTimeZone(ZoneOffset.UTC)); long now = sdf.parse(message.get("now").asText()).getTime(); dmsCancelTime = sdf.parse(cancelTime).getTime(); } } catch (ParseException e) { LOG.error("Error parsing deadman's confirmation "); } return; } @Override protected WebSocketClientExtensionHandler getWebSocketClientExtensionHandler() { return null; } <<<<<<< JsonNode data = message.get("data"); String instrument = data.size() > 0 ? data.get(0).get("symbol").asText() : message.get("filter").get("symbol").asText(); ======= >>>>>>> <<<<<<< public void enableDeadMansSwitch(long rate, long timeout) throws IOException { if (dmsDisposable != null) { LOG.warn("You already have Dead Man's switch enabled. Doing nothing"); return; } final BitmexWebSocketSubscriptionMessage subscriptionMessage = new BitmexWebSocketSubscriptionMessage("cancelAllAfter", new Object[]{DMS_CANCEL_ALL_IN}); String message = objectMapper.writeValueAsString(subscriptionMessage); dmsDisposable = Schedulers.single().schedulePeriodicallyDirect(new Runnable() { @Override public void run() { sendMessage(message); } }, 0, DMS_RESUBSCRIBE, TimeUnit.MILLISECONDS); Schedulers.single().start(); } public void disableDeadMansSwitch() throws IOException { final BitmexWebSocketSubscriptionMessage subscriptionMessage = new BitmexWebSocketSubscriptionMessage("cancelAllAfter", new Object[]{0}); String message = objectMapper.writeValueAsString(subscriptionMessage); sendMessage(message); } public boolean isDeadMansSwitchEnabled() { return dmsCancelTime > 0 && System.currentTimeMillis() < dmsCancelTime; } ======= @Override protected WebSocketClientExtensionHandler getWebSocketClientExtensionHandler() { return null; } public void addDelayEmitter(ObservableEmitter<Long> delayEmitter) { delayEmitters.add(delayEmitter); } >>>>>>> public void enableDeadMansSwitch(long rate, long timeout) throws IOException { if (dmsDisposable != null) { LOG.warn("You already have Dead Man's switch enabled. Doing nothing"); return; } final BitmexWebSocketSubscriptionMessage subscriptionMessage = new BitmexWebSocketSubscriptionMessage("cancelAllAfter", new Object[]{DMS_CANCEL_ALL_IN}); String message = objectMapper.writeValueAsString(subscriptionMessage); dmsDisposable = Schedulers.single().schedulePeriodicallyDirect(new Runnable() { @Override public void run() { sendMessage(message); } }, 0, DMS_RESUBSCRIBE, TimeUnit.MILLISECONDS); Schedulers.single().start(); } public void disableDeadMansSwitch() throws IOException { final BitmexWebSocketSubscriptionMessage subscriptionMessage = new BitmexWebSocketSubscriptionMessage("cancelAllAfter", new Object[]{0}); String message = objectMapper.writeValueAsString(subscriptionMessage); sendMessage(message); } public boolean isDeadMansSwitchEnabled() { return dmsCancelTime > 0 && System.currentTimeMillis() < dmsCancelTime; } public void addDelayEmitter(ObservableEmitter<Long> delayEmitter) { delayEmitters.add(delayEmitter); }
<<<<<<< import java.text.MessageFormat; import java.util.ArrayList; ======= >>>>>>> import java.util.ArrayList; <<<<<<< /** * Add a reference render policy * * @param policy */ public void referencePolicy(ReferenceRenderPolicy<?> policy) { referencePolicies.add(policy); } /** * Get the render policy for a tag * * @param tagName * the template name * @param sign * the syntax */ ======= // Query Operations >>>>>>> /** * Add a reference render policy * * @param policy */ public void referencePolicy(ReferenceRenderPolicy<?> policy) { referencePolicies.add(policy); } /** * Get the render policy for a tag * * @param tagName * the template name * @param sign * the syntax */ // Query Operations <<<<<<< public List<ReferenceRenderPolicy<?>> getReferencePolicies() { return referencePolicies; } ======= private RenderPolicy getCustomPolicy(String tagName) { return customPolicys.get(tagName); } private RenderPolicy getDefaultPolicy(Character sign) { return defaultPolicys.get(sign); } >>>>>>> public List<ReferenceRenderPolicy<?>> getReferencePolicies() { return referencePolicies; } private RenderPolicy getCustomPolicy(String tagName) { return customPolicys.get(tagName); } private RenderPolicy getDefaultPolicy(Character sign) { return defaultPolicys.get(sign); }
<<<<<<< ======= >>>>>>> <<<<<<< Log.d(TAG, "DexDrip Data Received!"); ======= >>>>>>>
<<<<<<< import android.util.Log; ======= import android.view.Menu; import android.view.MenuItem; >>>>>>> import android.view.Menu; import android.view.MenuItem; import android.util.Log;
<<<<<<< ======= import java.util.HashSet; import java.util.Set; import javax.inject.Named; import javax.inject.Singleton; import com.flipkart.flux.api.redriver.RedriverRegistry; >>>>>>> <<<<<<< import com.flipkart.flux.guice.annotation.ManagedEnv; ======= import com.flipkart.flux.impl.redriver.AkkaRedriverRegistryImpl; >>>>>>> import com.flipkart.flux.guice.annotation.ManagedEnv; import com.flipkart.flux.api.redriver.RedriverRegistry; import com.flipkart.flux.impl.redriver.AkkaRedriverRegistryImpl; <<<<<<< bind(ExecutableRegistry.class).annotatedWith(ManagedEnv.class).to(TaskExecutableRegistryImpl.class); ======= bind(RedriverRegistry.class).to(AkkaRedriverRegistryImpl.class); >>>>>>> bind(ExecutableRegistry.class).annotatedWith(ManagedEnv.class).to(TaskExecutableRegistryImpl.class); bind(RedriverRegistry.class).to(AkkaRedriverRegistryImpl.class);
<<<<<<< ======= import com.flipkart.flux.api.StateDefinition; >>>>>>> import com.flipkart.flux.api.StateDefinition; <<<<<<< import javax.inject.Singleton; import javax.transaction.Transactional; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; ======= import javax.inject.Named; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import java.util.HashSet; import java.util.Set; >>>>>>> import javax.inject.Singleton; import javax.transaction.Transactional; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.inject.Named; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import java.util.HashSet; import java.util.Set; <<<<<<< @Singleton @Path("/fsm/machines") ======= //todo merge with FluxResource @Path("/api") @Named >>>>>>> @Singleton @Path("/fsm/machines") @Named <<<<<<< @Consumes(MediaType.APPLICATION_JSON) @Transactional public Response createStateMachine(StateMachineDefinition stateMachineDefinition) { ======= @Path("/fsms") public String createStateMachine(StateMachineDefinition stateMachineDefinition) { >>>>>>> @Consumes(MediaType.APPLICATION_JSON) @Transactional public Response createStateMachine(StateMachineDefinition stateMachineDefinition) { <<<<<<< @Path("/{machineId}/context/events/{eventFqn}") ======= @Path("/fsms/{machineId}/context/events/{eventFqn}") >>>>>>> @Path("/{machineId}/context/events/{eventFqn}") <<<<<<< @Path("/{machineId}/cancel") ======= @Path("/fsms/{machineId}/cancel") >>>>>>> @Path("/{machineId}/cancel") <<<<<<< ======= /** * converts state machine definition to state machine domain object and saves in db */ private StateMachine persistStateMachine(StateMachineDefinition stateMachineDefinition) { Set<StateDefinition> stateDefinitions = stateMachineDefinition.getStates(); Set<State<T>> states = new HashSet<State<T>>(); for(StateDefinition stateDefinition : stateDefinitions) { states.add(convertStateDefinitionToState(stateDefinition)); } StateMachine<T> stateMachine = new StateMachine<T>(stateMachineDefinition.getVersion(), stateMachineDefinition.getName(), stateMachineDefinition.getDescription(), states); return stateMachinesDAO.create(stateMachine); } private State<T> convertStateDefinitionToState(StateDefinition stateDefinition) { State<T> state = new State<T>(stateDefinition.getVersion(), stateDefinition.getName(), stateDefinition.getDescription(), stateDefinition.getOnEntryHook(), stateDefinition.getTask(), stateDefinition.getOnExitHook(), stateDefinition.getRetryCount(), stateDefinition.getTimeout()); return state; } >>>>>>>
<<<<<<< /** * ClientElbPersistenceService to retrieve ClientElb URL from ClientElbDAO. * This service searches in in-memory cache first, in case of miss hits DAO/DB layer. * ClientElbUrl is the one where a particular state machines states is supposed to be executed. */ private ClientElbPersistenceService clientElbPersistenceService; /** * Constructor for this class */ ======= /** * Constructor for this class */ >>>>>>> /** * ClientElbPersistenceService to retrieve ClientElb URL from ClientElbDAO. * This service searches in in-memory cache first, in case of miss hits DAO/DB layer. * ClientElbUrl is the one where a particular state machines states is supposed to be executed. */ private ClientElbPersistenceService clientElbPersistenceService; /** * Constructor for this class */ <<<<<<< ======= * >>>>>>> <<<<<<< ======= * >>>>>>> <<<<<<< ======= * >>>>>>> <<<<<<< if(deleteFromRedriver) this.redriverRegistry.deRegisterTask(stateMachineId, taskId); ======= if (deleteFromRedriver) { this.redriverRegistry.deRegisterTask(stateMachineId, taskId); } } /* * Audit entry in AuditRecord. * Default values: [{machineId}, 0, 0, null, null, {EventUpdateAudit} String] */ @Transactional @SelectDataSource(type = DataSourceType.READ_WRITE, storage = Storage.SHARDED) public void updateEventData(String machineId, EventData eventData) { persistEvent(machineId, eventData); String EventUdpateAudit = "Event data updated for event: " + eventData.getName(); this.auditDAO.create(machineId, new AuditRecord(machineId, Long.valueOf(0), Long.valueOf(0), null, null, EventUdpateAudit)); logger.info("Updated event data persisted for event: {} and stateMachineId: {}", eventData.getName(), machineId); >>>>>>> if (deleteFromRedriver) { this.redriverRegistry.deRegisterTask(stateMachineId, taskId); } } /* * Audit entry in AuditRecord. * Default values: [{machineId}, 0, 0, null, null, {EventUpdateAudit} String] */ @Transactional @SelectDataSource(type = DataSourceType.READ_WRITE, storage = Storage.SHARDED) public void updateEventData(String machineId, EventData eventData) { persistEvent(machineId, eventData); String EventUdpateAudit = "Event data updated for event: " + eventData.getName(); this.auditDAO.create(machineId, new AuditRecord(machineId, Long.valueOf(0), Long.valueOf(0), null, null, EventUdpateAudit)); logger.info("Updated event data persisted for event: {} and stateMachineId: {}", eventData.getName(), machineId);
<<<<<<< /** * Constructor for this class. * @param executableRegistry the ExecutableRegistry containing all executable client code */ ======= @Inject >>>>>>> /** * Constructor for this class. * @param executableRegistry the ExecutableRegistry containing all executable client code */ @Inject
<<<<<<< verify(routerRegistry, times(2)).getRouter("com.flipkart.flux.dao.TestWorkflow_TestTask"); // For 2 unblocked states mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestWorkflow_TestTask_event1", expectedEvents, 1l, objectMapper.writeValueAsString(TestUtils.standardStateMachineOutputEvent()),2), 1); mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestWorkflow_TestTask_event1", expectedEvents, 1l, null,2), 1); ======= verify(routerRegistry, times(2)).getRouter("someRouter"); // For 2 unblocked states mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestTask", 1L, expectedEvents, 1l, objectMapper.writeValueAsString(TestUtils.standardStateMachineOutputEvent()),2), 1); mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestTask", 1L, expectedEvents, 1l, null,2), 1); >>>>>>> verify(routerRegistry, times(2)).getRouter("com.flipkart.flux.dao.TestWorkflow_TestTask"); // For 2 unblocked states mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestWorkflow_TestTask_event1", 1L, expectedEvents, 1l, objectMapper.writeValueAsString(TestUtils.standardStateMachineOutputEvent()),2), 1); mockActor.underlyingActor().assertMessageReceived(new TaskAndEvents("TestTask", "com.flipkart.flux.dao.TestWorkflow_TestTask_event1", 1L, expectedEvents, 1l, null,2), 1);
<<<<<<< public boolean canConsoleUseCommand() { return false; } @Override public List<String> addTabCompletionOptions(ICommandSender sender, String[] args, BlockPos pos) ======= public List<String> addTabCompletionOptions(ICommandSender sender, String[] args) >>>>>>> public List<String> addTabCompletionOptions(ICommandSender sender, String[] args, BlockPos pos)
<<<<<<< import net.minecraftforge.fml.common.FMLCommonHandler; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.TickEvent; ======= import net.minecraftforge.fe.event.entity.EntityPortalEvent; >>>>>>> import net.minecraftforge.fml.common.FMLCommonHandler; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.TickEvent; import net.minecraftforge.fe.event.entity.EntityPortalEvent; <<<<<<< ======= import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.eventhandler.EventPriority; import cpw.mods.fml.common.eventhandler.SubscribeEvent; import cpw.mods.fml.common.gameevent.TickEvent; >>>>>>>
<<<<<<< import java.util.HashMap; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import javax.inject.Inject; import javax.inject.Singleton; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.flipkart.flux.domain.FluxError; import com.flipkart.flux.impl.boot.ActorSystemManager; import com.flipkart.flux.impl.temp.Worker; import com.flipkart.polyguice.core.Initializable; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Address; import akka.actor.PoisonPill; import akka.actor.Props; ======= import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Address; import akka.actor.AddressFromURIString; import akka.actor.PoisonPill; import akka.actor.Props; >>>>>>> import java.util.HashMap; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; import javax.inject.Inject; import javax.inject.Singleton; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.flipkart.flux.domain.FluxError; import com.flipkart.flux.impl.boot.ActorSystemManager; import com.flipkart.flux.impl.temp.Worker; import com.flipkart.polyguice.core.Initializable; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Address; import akka.actor.PoisonPill; import akka.actor.Props; <<<<<<< ======= import com.flipkart.flux.impl.boot.ActorSystemManager; import com.flipkart.flux.impl.task.CustomSuperviseStrategy; import com.flipkart.flux.impl.temp.Worker; import com.flipkart.polyguice.core.Initializable; >>>>>>> import com.flipkart.flux.impl.boot.ActorSystemManager; import com.flipkart.flux.impl.task.CustomSuperviseStrategy; import com.flipkart.flux.impl.temp.Worker; import com.flipkart.polyguice.core.Initializable; <<<<<<< ======= import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; >>>>>>> import javax.inject.Inject; import javax.inject.Singleton; import java.util.HashMap; <<<<<<< /** Configuration access for Router setup*/ private RouterConfigurationRegistry routerConfigurationRegistry; ======= private final CustomSuperviseStrategy superviseStrategy; RouterConfigurationRegistry routerConfigurationRegistry; >>>>>>> /** Configuration access for Router setup*/ private RouterConfigurationRegistry routerConfigurationRegistry; private final CustomSuperviseStrategy superviseStrategy; <<<<<<< ClusterSingletonManager.props(new ClusterRouterPool(new RoundRobinPool(2), next.getValue()).props( new RemoteRouterConfig(new RoundRobinPool(6), this.memberAddresses).props( ======= ClusterSingletonManager.props(new ClusterRouterPool(new RoundRobinPool(2).withSupervisorStrategy(superviseStrategy.getStrategy()), next.getValue()).props( new RemoteRouterConfig(new RoundRobinPool(6), addresses1).props( >>>>>>> ClusterSingletonManager.props(new ClusterRouterPool(new RoundRobinPool(2).withSupervisorStrategy(superviseStrategy.getStrategy()), next.getValue()).props( new RemoteRouterConfig(new RoundRobinPool(6), addresses1).props(
<<<<<<< ======= import com.flipkart.flux.resource.ClientElbResource; import com.flipkart.flux.resource.DeploymentUnitResource; >>>>>>> import com.flipkart.flux.resource.ClientElbResource; <<<<<<< public ResourceConfig getAPIResourceConfig(StateMachineResource stateMachineResource, StatusResource statusResource, MetricRegistry metricRegistry) { ======= public ResourceConfig getAPIResourceConfig(StateMachineResource stateMachineResource, DeploymentUnitResource deploymentUnitResource, StatusResource statusResource, ClientElbResource clientElbResource, MetricRegistry metricRegistry) { >>>>>>> public ResourceConfig getAPIResourceConfig(StateMachineResource stateMachineResource, StatusResource statusResource, ClientElbResource clientElbResource, MetricRegistry metricRegistry) {
<<<<<<< import com.amazonaws.services.kinesis.model.StreamDescriptionSummary; ======= >>>>>>> import com.amazonaws.services.kinesis.model.StreamDescriptionSummary; <<<<<<< * Wait for a Stream to become available or transition to the indicated status * ======= * Wait for a Stream to become available or transition to the indicated * status * >>>>>>> * Wait for a Stream to become available or transition to the indicated status * <<<<<<< return (List<Shard>) doOperation(kinesisClient, describe, streamName, DESCRIBE_RETRIES, false); } ======= return (ListShardsResult) doOperation(kinesisClient, describe, streamName, DESCRIBE_RETRIES, false); >>>>>>> return (List<Shard>) doOperation(kinesisClient, describe, streamName, DESCRIBE_RETRIES, false); } <<<<<<< Shard s = getShard(kinesisClient, streamName, shardId); ======= ListShardsResult listShardsResult = listShards(kinesisClient, streamName, shardId); Shard s = listShardsResult.getShards().get(0); >>>>>>> Shard s = getShard(kinesisClient, streamName, shardId); <<<<<<< ======= ListShardsResult listShardsResult = null; >>>>>>> <<<<<<< ======= // load all shards on the stream List<Shard> allShards = new ArrayList<>(); do { listShardsResult = listShards(kinesisClient, streamName, lastShardId); for (Shard shard : listShardsResult.getShards()) { allShards.add(shard); lastShardId = shard.getShardId(); } } while (/* old describeStream call used to return nothing sometimes, being defensive with ListShards responses as well*/ listShardsResult == null || listShardsResult.getShards() == null || listShardsResult.getShards().size() == 0 || listShardsResult.getNextToken() != null); >>>>>>>
<<<<<<< import com.googlecode.goclipse.Activator; import com.googlecode.goclipse.debug.ui.GoToggleBreakpointAdapter; ======= import org.eclipse.cdt.debug.internal.ui.actions.breakpoints.ToggleDynamicPrintfAdapter; import melnorme.lang.ide.ui.LangUIPlugin; import MMRNMHRM_ID.debug.ui.ToggleBreakpointAdapter; >>>>>>> import org.eclipse.cdt.debug.internal.ui.actions.breakpoints.ToggleDynamicPrintfAdapter; import com.googlecode.goclipse.Activator; import com.googlecode.goclipse.debug.ui.GoToggleBreakpointAdapter;
<<<<<<< public class CommandHeal extends FEcmdModuleCommands ======= import cpw.mods.fml.common.FMLCommonHandler; public class CommandHeal extends ForgeEssentialsCommandBase >>>>>>> public class CommandHeal extends ForgeEssentialsCommandBase
<<<<<<< public static final String EDITOR_CONTEXT = "#GoEditorContext"; public static final String RULER_CONTEXT = "#GoEditorRulerContext"; ======= public static final String ROOT_PREF_PAGE_ID = PLUGIN_ID + ".PreferencePages.Root"; public static final String RULER_CONTEXT = "#LANGUAGE_RulerContext"; public static final String EDITOR_CONTEXT = "#LANGUAGE_EditorContext"; >>>>>>> public static final String ROOT_PREF_PAGE_ID = PLUGIN_ID + ".PreferencePages.Root"; public static final String EDITOR_CONTEXT = "#GoEditorContext"; public static final String RULER_CONTEXT = "#GoEditorRulerContext";
<<<<<<< ======= import melnorme.lang.tooling.data.LANGUAGE_SDKLocationValidator; >>>>>>> <<<<<<< protected GoSDKLocationValidator getSDKLocationValidator() { return new GoSDKLocationValidator(); ======= public LANGUAGE_SDKLocationValidator getSDKLocationValidator() { return new LANGUAGE_SDKLocationValidator(); >>>>>>> public GoSDKLocationValidator getSDKLocationValidator() { return new GoSDKLocationValidator();